Diffstat (limited to 'thirdparty/vulkan/include')
-rw-r--r-- thirdparty/vulkan/include/vulkan/vk_icd.h             |    49
-rw-r--r-- thirdparty/vulkan/include/vulkan/vk_layer.h           |    16
-rw-r--r-- thirdparty/vulkan/include/vulkan/vk_platform.h        |    14
-rw-r--r-- thirdparty/vulkan/include/vulkan/vulkan.h             |    25
-rw-r--r-- thirdparty/vulkan/include/vulkan/vulkan.hpp           | 70090
-rw-r--r-- thirdparty/vulkan/include/vulkan/vulkan_android.h     |    14
-rw-r--r-- thirdparty/vulkan/include/vulkan/vulkan_beta.h        |   459
-rw-r--r-- thirdparty/vulkan/include/vulkan/vulkan_core.h        |  2987
-rw-r--r-- thirdparty/vulkan/include/vulkan/vulkan_directfb.h    |    54
-rw-r--r-- thirdparty/vulkan/include/vulkan/vulkan_fuchsia.h     |    14
-rw-r--r-- thirdparty/vulkan/include/vulkan/vulkan_ggp.h         |    14
-rw-r--r-- thirdparty/vulkan/include/vulkan/vulkan_ios.h         |    16
-rw-r--r-- thirdparty/vulkan/include/vulkan/vulkan_macos.h       |    16
-rw-r--r-- thirdparty/vulkan/include/vulkan/vulkan_metal.h       |    14
-rw-r--r-- thirdparty/vulkan/include/vulkan/vulkan_vi.h          |    14
-rw-r--r-- thirdparty/vulkan/include/vulkan/vulkan_wayland.h     |    14
-rw-r--r-- thirdparty/vulkan/include/vulkan/vulkan_win32.h       |    17
-rw-r--r-- thirdparty/vulkan/include/vulkan/vulkan_xcb.h         |    14
-rw-r--r-- thirdparty/vulkan/include/vulkan/vulkan_xlib.h        |    14
-rw-r--r-- thirdparty/vulkan/include/vulkan/vulkan_xlib_xrandr.h |    14
20 files changed, 47758 insertions, 26111 deletions
diff --git a/thirdparty/vulkan/include/vulkan/vk_icd.h b/thirdparty/vulkan/include/vulkan/vk_icd.h
index 5dff59a16e..fde5bf6214 100644
--- a/thirdparty/vulkan/include/vulkan/vk_icd.h
+++ b/thirdparty/vulkan/include/vulkan/vk_icd.h
@@ -41,17 +41,45 @@
// that if the loader is older, it should automatically fail a
// call for any API version > 1.0. Otherwise, the loader will
// manually determine if it can support the expected version.
-#define CURRENT_LOADER_ICD_INTERFACE_VERSION 5
+// Version 6 - Add support for vk_icdEnumerateAdapterPhysicalDevices.
+#define CURRENT_LOADER_ICD_INTERFACE_VERSION 6
#define MIN_SUPPORTED_LOADER_ICD_INTERFACE_VERSION 0
#define MIN_PHYS_DEV_EXTENSION_ICD_INTERFACE_VERSION 4
-typedef VkResult(VKAPI_PTR *PFN_vkNegotiateLoaderICDInterfaceVersion)(uint32_t *pVersion);
+// Old typedefs that don't follow a proper naming convention but are preserved for compatibility
+typedef VkResult(VKAPI_PTR *PFN_vkNegotiateLoaderICDInterfaceVersion)(uint32_t *pVersion);
// This is defined in vk_layer.h which will be found by the loader, but if an ICD is building against this
// file directly, it won't be found.
#ifndef PFN_GetPhysicalDeviceProcAddr
typedef PFN_vkVoidFunction(VKAPI_PTR *PFN_GetPhysicalDeviceProcAddr)(VkInstance instance, const char *pName);
#endif
+// Typedefs for loader/ICD interface
+typedef VkResult (VKAPI_PTR *PFN_vk_icdNegotiateLoaderICDInterfaceVersion)(uint32_t* pVersion);
+typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vk_icdGetInstanceProcAddr)(VkInstance instance, const char* pName);
+typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vk_icdGetPhysicalDeviceProcAddr)(VkInstance instance, const char* pName);
+#if defined(VK_USE_PLATFORM_WIN32_KHR)
+typedef VkResult (VKAPI_PTR *PFN_vk_icdEnumerateAdapterPhysicalDevices)(VkInstance instance, LUID adapterLUID,
+ uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices);
+#endif
+
+// Prototypes for loader/ICD interface
+#if !defined(VK_NO_PROTOTYPES)
+#ifdef __cplusplus
+extern "C" {
+#endif
+ VKAPI_ATTR VkResult VKAPI_CALL vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t* pVersion);
+ VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetInstanceProcAddr(VkInstance instance, const char* pName);
+    VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetPhysicalDeviceProcAddr(VkInstance instance, const char* pName);
+#if defined(VK_USE_PLATFORM_WIN32_KHR)
+ VKAPI_ATTR VkResult VKAPI_CALL vk_icdEnumerateAdapterPhysicalDevices(VkInstance instance, LUID adapterLUID,
+ uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices);
+#endif
+#ifdef __cplusplus
+}
+#endif
+#endif
+
/*
* The ICD must reserve space for a pointer for the loader's dispatch
* table, at the start of <each object>.
@@ -91,6 +119,8 @@ typedef enum {
VK_ICD_WSI_PLATFORM_DISPLAY,
VK_ICD_WSI_PLATFORM_HEADLESS,
VK_ICD_WSI_PLATFORM_METAL,
+ VK_ICD_WSI_PLATFORM_DIRECTFB,
+ VK_ICD_WSI_PLATFORM_VI,
} VkIcdWsiPlatform;
typedef struct {
@@ -137,6 +167,14 @@ typedef struct {
} VkIcdSurfaceXlib;
#endif // VK_USE_PLATFORM_XLIB_KHR
+#ifdef VK_USE_PLATFORM_DIRECTFB_EXT
+typedef struct {
+ VkIcdSurfaceBase base;
+ IDirectFB *dfb;
+ IDirectFBSurface *surface;
+} VkIcdSurfaceDirectFB;
+#endif // VK_USE_PLATFORM_DIRECTFB_EXT
+
#ifdef VK_USE_PLATFORM_ANDROID_KHR
typedef struct {
VkIcdSurfaceBase base;
@@ -180,4 +218,11 @@ typedef struct {
} VkIcdSurfaceMetal;
#endif // VK_USE_PLATFORM_METAL_EXT
+#ifdef VK_USE_PLATFORM_VI_NN
+typedef struct {
+ VkIcdSurfaceBase base;
+ void *window;
+} VkIcdSurfaceVi;
+#endif // VK_USE_PLATFORM_VI_NN
+
#endif // VKICD_H
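
The vk_icd.h hunks above bump the loader/ICD interface to version 6 and add typedefs plus optional prototypes for the entry points an ICD exports. A minimal sketch of how a driver might implement the new negotiation and, on Windows, the adapter enumeration follows; the clamp to version 6 and the empty device list are illustrative assumptions, not part of this change.

    // Hedged sketch of an ICD implementing loader interface version 6.
    #include <vulkan/vulkan.h>
    #include <vulkan/vk_icd.h>

    extern "C" VKAPI_ATTR VkResult VKAPI_CALL
    vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t* pVersion)
    {
        // The loader passes the highest version it supports; the ICD lowers it
        // to the highest version it implements (6 as of this change).
        if (*pVersion > 6)
            *pVersion = 6;
        return VK_SUCCESS;
    }

    #if defined(VK_USE_PLATFORM_WIN32_KHR)
    extern "C" VKAPI_ATTR VkResult VKAPI_CALL
    vk_icdEnumerateAdapterPhysicalDevices(VkInstance instance, LUID adapterLUID,
                                          uint32_t* pPhysicalDeviceCount,
                                          VkPhysicalDevice* pPhysicalDevices)
    {
        // Interface version 6 lets the loader ask which physical devices belong
        // to a given DXGI adapter so it can sort devices the same way the OS does.
        // A real driver would match adapterLUID against its devices here;
        // reporting zero devices is only a placeholder.
        (void)instance; (void)adapterLUID; (void)pPhysicalDevices;
        *pPhysicalDeviceCount = 0;
        return VK_SUCCESS;
    }
    #endif
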
diff --git a/thirdparty/vulkan/include/vulkan/vk_layer.h b/thirdparty/vulkan/include/vulkan/vk_layer.h
index fa76520089..0651870c70 100644
--- a/thirdparty/vulkan/include/vulkan/vk_layer.h
+++ b/thirdparty/vulkan/include/vulkan/vk_layer.h
@@ -83,7 +83,8 @@ typedef VkResult(VKAPI_PTR *PFN_PhysDevExt)(VkPhysicalDevice phys_device);
typedef enum VkLayerFunction_ {
VK_LAYER_LINK_INFO = 0,
VK_LOADER_DATA_CALLBACK = 1,
- VK_LOADER_LAYER_CREATE_DEVICE_CALLBACK = 2
+ VK_LOADER_LAYER_CREATE_DEVICE_CALLBACK = 2,
+ VK_LOADER_FEATURES = 3,
} VkLayerFunction;
typedef struct VkLayerInstanceLink_ {
@@ -111,6 +112,12 @@ typedef VkResult (VKAPI_PTR *PFN_vkSetDeviceLoaderData)(VkDevice device,
typedef VkResult (VKAPI_PTR *PFN_vkLayerCreateDevice)(VkInstance instance, VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator, VkDevice *pDevice, PFN_vkGetInstanceProcAddr layerGIPA, PFN_vkGetDeviceProcAddr *nextGDPA);
typedef void (VKAPI_PTR *PFN_vkLayerDestroyDevice)(VkDevice physicalDevice, const VkAllocationCallbacks *pAllocator, PFN_vkDestroyDevice destroyFunction);
+
+typedef enum VkLoaderFeatureFlagBits {
+    VK_LOADER_FEATURE_PHYSICAL_DEVICE_SORTING = 0x00000001,
+} VkLoaderFeatureFlagBits;
+typedef VkFlags VkLoaderFeatureFlags;
+
typedef struct {
VkStructureType sType; // VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO
const void *pNext;
@@ -119,9 +126,10 @@ typedef struct {
VkLayerInstanceLink *pLayerInfo;
PFN_vkSetInstanceLoaderData pfnSetInstanceLoaderData;
struct {
- PFN_vkLayerCreateDevice pfnLayerCreateDevice;
- PFN_vkLayerDestroyDevice pfnLayerDestroyDevice;
- } layerDevice;
+ PFN_vkLayerCreateDevice pfnLayerCreateDevice;
+ PFN_vkLayerDestroyDevice pfnLayerDestroyDevice;
+ } layerDevice;
+ VkLoaderFeatureFlags loaderFeatures;
} u;
} VkLayerInstanceCreateInfo;
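
The new VK_LOADER_FEATURES enumerant and the loaderFeatures member let a layer discover loader capabilities while vkCreateInstance is being chained. A hedged sketch of a layer testing for physical-device sorting support; the helper name and the chain walk are illustrative, while the structure and flag names come from vk_layer.h.

    // Hedged sketch: a layer checking the loader's feature flags at instance creation.
    #include <vulkan/vulkan.h>
    #include <vulkan/vk_layer.h>

    static bool loaderSortsPhysicalDevices(const VkInstanceCreateInfo* pCreateInfo)
    {
        // Walk the pNext chain for the loader-provided VkLayerInstanceCreateInfo
        // entry whose function field carries the new VK_LOADER_FEATURES value.
        for (const VkBaseInStructure* p =
                 reinterpret_cast<const VkBaseInStructure*>(pCreateInfo->pNext);
             p != nullptr; p = p->pNext)
        {
            if (p->sType == VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO)
            {
                const VkLayerInstanceCreateInfo* info =
                    reinterpret_cast<const VkLayerInstanceCreateInfo*>(p);
                if (info->function == VK_LOADER_FEATURES)
                    return (info->u.loaderFeatures &
                            VK_LOADER_FEATURE_PHYSICAL_DEVICE_SORTING) != 0;
            }
        }
        return false;  // older loader: feature flags are not reported
    }
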
diff --git a/thirdparty/vulkan/include/vulkan/vk_platform.h b/thirdparty/vulkan/include/vulkan/vk_platform.h
index 7289299240..048322d93b 100644
--- a/thirdparty/vulkan/include/vulkan/vk_platform.h
+++ b/thirdparty/vulkan/include/vulkan/vk_platform.h
@@ -2,19 +2,9 @@
// File: vk_platform.h
//
/*
-** Copyright (c) 2014-2017 The Khronos Group Inc.
+** Copyright (c) 2014-2020 The Khronos Group Inc.
**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-** http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
+** SPDX-License-Identifier: Apache-2.0
*/
diff --git a/thirdparty/vulkan/include/vulkan/vulkan.h b/thirdparty/vulkan/include/vulkan/vulkan.h
index 5f853f9fc8..b7716ec8ea 100644
--- a/thirdparty/vulkan/include/vulkan/vulkan.h
+++ b/thirdparty/vulkan/include/vulkan/vulkan.h
@@ -2,19 +2,9 @@
#define VULKAN_H_ 1
/*
-** Copyright (c) 2015-2019 The Khronos Group Inc.
+** Copyright (c) 2015-2020 The Khronos Group Inc.
**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-** http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
+** SPDX-License-Identifier: Apache-2.0
*/
#include "vk_platform.h"
@@ -71,6 +61,12 @@
#endif
+#ifdef VK_USE_PLATFORM_DIRECTFB_EXT
+#include <directfb.h>
+#include "vulkan_directfb.h"
+#endif
+
+
#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
#include <X11/Xlib.h>
#include <X11/extensions/Xrandr.h>
@@ -83,4 +79,9 @@
#include "vulkan_ggp.h"
#endif
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+#include "vulkan_beta.h"
+#endif
+
#endif // VULKAN_H_
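
vulkan.h now pulls in the DirectFB WSI header and the provisional vulkan_beta.h, but only when the corresponding macros are defined before inclusion. A minimal sketch of the opt-in; the translation unit is hypothetical, and in practice the macros usually come from the build system.

    // Hedged sketch: opting in to the headers newly referenced by vulkan.h.
    // Typically set on the compiler command line, e.g.
    //   c++ -DVK_USE_PLATFORM_DIRECTFB_EXT -DVK_ENABLE_BETA_EXTENSIONS ...
    #define VK_USE_PLATFORM_DIRECTFB_EXT   // pulls in <directfb.h> and "vulkan_directfb.h"
    #define VK_ENABLE_BETA_EXTENSIONS      // pulls in "vulkan_beta.h" (provisional extensions)
    #include <vulkan/vulkan.h>
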
diff --git a/thirdparty/vulkan/include/vulkan/vulkan.hpp b/thirdparty/vulkan/include/vulkan/vulkan.hpp
index 441869898f..68b42afe19 100644
--- a/thirdparty/vulkan/include/vulkan/vulkan.hpp
+++ b/thirdparty/vulkan/include/vulkan/vulkan.hpp
@@ -1,32 +1,6 @@
// Copyright (c) 2015-2020 The Khronos Group Inc.
//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-// ---- Exceptions to the Apache 2.0 License: ----
-//
-// As an exception, if you use this Software to generate code and portions of
-// this Software are embedded into the generated code as a result, you may
-// redistribute such product without providing attribution as would otherwise
-// be required by Sections 4(a), 4(b) and 4(d) of the License.
-//
-// In addition, if you combine or link code generated by this Software with
-// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
-// ("`Combined Software`") and if a court of competent jurisdiction determines
-// that the patent provision (Section 3), the indemnity provision (Section 9)
-// or other Section of the License conflicts with the conditions of the
-// applicable GPL or LGPL license, you may retroactively and prospectively
-// choose to deem waived or otherwise exclude such Section(s) of the License,
-// but only in their entirety and only with respect to the Combined Software.
+// SPDX-License-Identifier: Apache-2.0 OR MIT
//
// This header is generated from the Khronos Vulkan XML API Registry.
@@ -34,11 +8,30 @@
#ifndef VULKAN_HPP
#define VULKAN_HPP
+#if defined( _MSVC_LANG )
+# define VULKAN_HPP_CPLUSPLUS _MSVC_LANG
+#else
+# define VULKAN_HPP_CPLUSPLUS __cplusplus
+#endif
+
+#if 201703L < VULKAN_HPP_CPLUSPLUS
+# define VULKAN_HPP_CPP_VERSION 20
+#elif 201402L < VULKAN_HPP_CPLUSPLUS
+# define VULKAN_HPP_CPP_VERSION 17
+#elif 201103L < VULKAN_HPP_CPLUSPLUS
+# define VULKAN_HPP_CPP_VERSION 14
+#elif 199711L < VULKAN_HPP_CPLUSPLUS
+# define VULKAN_HPP_CPP_VERSION 11
+#else
+# error "vulkan.hpp needs at least c++ standard version 11"
+#endif
+
#include <algorithm>
#include <array>
#include <cstddef>
#include <cstdint>
#include <cstring>
+#include <functional>
#include <initializer_list>
#include <string>
#include <system_error>
@@ -46,7 +39,15 @@
#include <type_traits>
#include <vulkan/vulkan.h>
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#if 17 <= VULKAN_HPP_CPP_VERSION
+#include <string_view>
+#endif
+
+#if defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+# if !defined(VULKAN_HPP_NO_SMART_HANDLE)
+# define VULKAN_HPP_NO_SMART_HANDLE
+# endif
+#else
# include <memory>
# include <vector>
#endif
@@ -56,21 +57,39 @@
# define VULKAN_HPP_ASSERT assert
#endif
+#if !defined(VULKAN_HPP_ASSERT_ON_RESULT)
+# define VULKAN_HPP_ASSERT_ON_RESULT VULKAN_HPP_ASSERT
+#endif
+
#if !defined(VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL)
# define VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL 1
#endif
#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL == 1
-# if defined(__linux__) || defined(__APPLE__)
-# include <dlfcn.h>
+# if defined( __linux__ ) || defined( __APPLE__ )
+# include <dlfcn.h>
+# elif defined( _WIN32 )
+typedef struct HINSTANCE__ * HINSTANCE;
+# if defined( _WIN64 )
+typedef int64_t( __stdcall * FARPROC )();
+# else
+typedef int( __stdcall * FARPROC )();
+# endif
+extern "C" __declspec( dllimport ) HINSTANCE __stdcall LoadLibraryA( char const * lpLibFileName );
+extern "C" __declspec( dllimport ) int __stdcall FreeLibrary( HINSTANCE hLibModule );
+extern "C" __declspec( dllimport ) FARPROC __stdcall GetProcAddress( HINSTANCE hModule, const char * lpProcName );
# endif
+#endif
-# if defined(_WIN32)
-# include <windows.h>
-# endif
+#if ( 201711 <= __cpp_impl_three_way_comparison ) && __has_include( <compare> )
+# define VULKAN_HPP_HAS_SPACESHIP_OPERATOR
#endif
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+# include <compare>
+#endif
+
-static_assert( VK_HEADER_VERSION == 131 , "Wrong VK_HEADER_VERSION!" );
+static_assert( VK_HEADER_VERSION == 154 , "Wrong VK_HEADER_VERSION!" );
// 32-bit vulkan is not typesafe for handles, so don't allow copy constructors on this platform by default.
// To enable this feature on 32-bit platforms please define VULKAN_HPP_TYPESAFE_CONVERSION
@@ -158,6 +177,24 @@ static_assert( VK_HEADER_VERSION == 131 , "Wrong VK_HEADER_VERSION!" );
# endif
#endif
+#if 14 <= VULKAN_HPP_CPP_VERSION
+# define VULKAN_HPP_DEPRECATED( msg ) [[deprecated( msg )]]
+#else
+# define VULKAN_HPP_DEPRECATED( msg )
+#endif
+
+#if ( 17 <= VULKAN_HPP_CPP_VERSION ) && !defined( VULKAN_HPP_NO_NODISCARD_WARNINGS )
+# define VULKAN_HPP_NODISCARD [[nodiscard]]
+# if defined(VULKAN_HPP_NO_EXCEPTIONS)
+# define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS [[nodiscard]]
+# else
+# define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
+# endif
+#else
+# define VULKAN_HPP_NODISCARD
+# define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
+#endif
+
#if !defined(VULKAN_HPP_NAMESPACE)
#define VULKAN_HPP_NAMESPACE vk
#endif
@@ -168,55 +205,276 @@ static_assert( VK_HEADER_VERSION == 131 , "Wrong VK_HEADER_VERSION!" );
namespace VULKAN_HPP_NAMESPACE
{
+
#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
template <typename T>
class ArrayProxy
{
public:
- VULKAN_HPP_CONSTEXPR ArrayProxy(std::nullptr_t) VULKAN_HPP_NOEXCEPT
- : m_count(0)
- , m_ptr(nullptr)
+ VULKAN_HPP_CONSTEXPR ArrayProxy() VULKAN_HPP_NOEXCEPT
+ : m_count( 0 )
+ , m_ptr( nullptr )
+ {}
+
+ VULKAN_HPP_CONSTEXPR ArrayProxy( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_count( 0 )
+ , m_ptr( nullptr )
+ {}
+
+ ArrayProxy( T & value ) VULKAN_HPP_NOEXCEPT
+ : m_count( 1 )
+ , m_ptr( &value )
+ {}
+
+ template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+ ArrayProxy( typename std::remove_const<T>::type & value ) VULKAN_HPP_NOEXCEPT
+ : m_count( 1 )
+ , m_ptr( &value )
+ {}
+
+ ArrayProxy( uint32_t count, T * ptr ) VULKAN_HPP_NOEXCEPT
+ : m_count( count )
+ , m_ptr( ptr )
+ {}
+
+ template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+ ArrayProxy( uint32_t count, typename std::remove_const<T>::type * ptr ) VULKAN_HPP_NOEXCEPT
+ : m_count( count )
+ , m_ptr( ptr )
+ {}
+
+ ArrayProxy( std::initializer_list<T> const & list ) VULKAN_HPP_NOEXCEPT
+ : m_count( static_cast<uint32_t>( list.size() ) )
+ , m_ptr( list.begin() )
+ {}
+
+ template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+ ArrayProxy( std::initializer_list<typename std::remove_const<T>::type> const & list ) VULKAN_HPP_NOEXCEPT
+ : m_count( static_cast<uint32_t>( list.size() ) )
+ , m_ptr( list.begin() )
+ {}
+
+ ArrayProxy( std::initializer_list<T> & list ) VULKAN_HPP_NOEXCEPT
+ : m_count( static_cast<uint32_t>( list.size() ) )
+ , m_ptr( list.begin() )
+ {}
+
+ template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+ ArrayProxy( std::initializer_list<typename std::remove_const<T>::type> & list ) VULKAN_HPP_NOEXCEPT
+ : m_count( static_cast<uint32_t>( list.size() ) )
+ , m_ptr( list.begin() )
+ {}
+
+ template <size_t N>
+ ArrayProxy( std::array<T, N> const & data ) VULKAN_HPP_NOEXCEPT
+ : m_count( N )
+ , m_ptr( data.data() )
+ {}
+
+ template <size_t N, typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+ ArrayProxy( std::array<typename std::remove_const<T>::type, N> const & data ) VULKAN_HPP_NOEXCEPT
+ : m_count( N )
+ , m_ptr( data.data() )
+ {}
+
+ template <size_t N>
+ ArrayProxy( std::array<T, N> & data ) VULKAN_HPP_NOEXCEPT
+ : m_count( N )
+ , m_ptr( data.data() )
+ {}
+
+ template <size_t N, typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+ ArrayProxy( std::array<typename std::remove_const<T>::type, N> & data ) VULKAN_HPP_NOEXCEPT
+ : m_count( N )
+ , m_ptr( data.data() )
+ {}
+
+ template <class Allocator = std::allocator<typename std::remove_const<T>::type>>
+ ArrayProxy( std::vector<T, Allocator> const & data ) VULKAN_HPP_NOEXCEPT
+ : m_count( static_cast<uint32_t>( data.size() ) )
+ , m_ptr( data.data() )
+ {}
+
+ template <class Allocator = std::allocator<typename std::remove_const<T>::type>,
+ typename B = T,
+ typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+ ArrayProxy( std::vector<typename std::remove_const<T>::type, Allocator> const & data ) VULKAN_HPP_NOEXCEPT
+ : m_count( static_cast<uint32_t>( data.size() ) )
+ , m_ptr( data.data() )
+ {}
+
+ template <class Allocator = std::allocator<typename std::remove_const<T>::type>>
+ ArrayProxy( std::vector<T, Allocator> & data ) VULKAN_HPP_NOEXCEPT
+ : m_count( static_cast<uint32_t>( data.size() ) )
+ , m_ptr( data.data() )
+ {}
+
+ template <class Allocator = std::allocator<typename std::remove_const<T>::type>,
+ typename B = T,
+ typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+ ArrayProxy( std::vector<typename std::remove_const<T>::type, Allocator> & data ) VULKAN_HPP_NOEXCEPT
+ : m_count( static_cast<uint32_t>( data.size() ) )
+ , m_ptr( data.data() )
+ {}
+
+ const T * begin() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_ptr;
+ }
+
+ const T * end() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_ptr + m_count;
+ }
+
+ const T & front() const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( m_count && m_ptr );
+ return *m_ptr;
+ }
+
+ const T & back() const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( m_count && m_ptr );
+ return *( m_ptr + m_count - 1 );
+ }
+
+ bool empty() const VULKAN_HPP_NOEXCEPT
+ {
+ return ( m_count == 0 );
+ }
+
+ uint32_t size() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_count;
+ }
+
+ T * data() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_ptr;
+ }
+
+ private:
+ uint32_t m_count;
+ T * m_ptr;
+ };
+
+ template <typename T>
+ class ArrayProxyNoTemporaries
+ {
+ public:
+ VULKAN_HPP_CONSTEXPR ArrayProxyNoTemporaries() VULKAN_HPP_NOEXCEPT
+ : m_count( 0 )
+ , m_ptr( nullptr )
+ {}
+
+ VULKAN_HPP_CONSTEXPR ArrayProxyNoTemporaries( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_count( 0 )
+ , m_ptr( nullptr )
+ {}
+
+ template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+ ArrayProxyNoTemporaries( typename std::remove_const<T>::type & value ) VULKAN_HPP_NOEXCEPT
+ : m_count( 1 )
+ , m_ptr( &value )
+ {}
+
+ ArrayProxyNoTemporaries( uint32_t count, T * ptr ) VULKAN_HPP_NOEXCEPT
+ : m_count( count )
+ , m_ptr( ptr )
+ {}
+
+ template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+ ArrayProxyNoTemporaries( uint32_t count, typename std::remove_const<T>::type * ptr ) VULKAN_HPP_NOEXCEPT
+ : m_count( count )
+ , m_ptr( ptr )
{}
- ArrayProxy(typename std::remove_reference<T>::type & ptr) VULKAN_HPP_NOEXCEPT
- : m_count(1)
- , m_ptr(&ptr)
+ ArrayProxyNoTemporaries( std::initializer_list<T> const & list ) VULKAN_HPP_NOEXCEPT
+ : m_count( static_cast<uint32_t>( list.size() ) )
+ , m_ptr( list.begin() )
{}
- ArrayProxy(uint32_t count, T * ptr) VULKAN_HPP_NOEXCEPT
- : m_count(count)
- , m_ptr(ptr)
+ template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+ ArrayProxyNoTemporaries( std::initializer_list<typename std::remove_const<T>::type> const & list ) VULKAN_HPP_NOEXCEPT
+ : m_count( static_cast<uint32_t>( list.size() ) )
+ , m_ptr( list.begin() )
{}
+ ArrayProxyNoTemporaries( std::initializer_list<T> & list ) VULKAN_HPP_NOEXCEPT
+ : m_count( static_cast<uint32_t>( list.size() ) )
+ , m_ptr( list.begin() )
+ {}
+
+ template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+ ArrayProxyNoTemporaries( std::initializer_list<typename std::remove_const<T>::type> & list ) VULKAN_HPP_NOEXCEPT
+ : m_count( static_cast<uint32_t>( list.size() ) )
+ , m_ptr( list.begin() )
+ {}
+
+ ArrayProxyNoTemporaries( std::initializer_list<T> const && list ) VULKAN_HPP_NOEXCEPT = delete;
+ ArrayProxyNoTemporaries( std::initializer_list<T> && list ) VULKAN_HPP_NOEXCEPT = delete;
+
template <size_t N>
- ArrayProxy(std::array<typename std::remove_const<T>::type, N> & data) VULKAN_HPP_NOEXCEPT
- : m_count(N)
- , m_ptr(data.data())
+ ArrayProxyNoTemporaries( std::array<T, N> const & data ) VULKAN_HPP_NOEXCEPT
+ : m_count( N )
+ , m_ptr( data.data() )
+ {}
+
+ template <size_t N, typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+ ArrayProxyNoTemporaries( std::array<typename std::remove_const<T>::type, N> const & data ) VULKAN_HPP_NOEXCEPT
+ : m_count( N )
+ , m_ptr( data.data() )
{}
template <size_t N>
- ArrayProxy(std::array<typename std::remove_const<T>::type, N> const& data) VULKAN_HPP_NOEXCEPT
- : m_count(N)
- , m_ptr(data.data())
+ ArrayProxyNoTemporaries( std::array<T, N> & data ) VULKAN_HPP_NOEXCEPT
+ : m_count( N )
+ , m_ptr( data.data() )
{}
+ template <size_t N, typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+ ArrayProxyNoTemporaries( std::array<typename std::remove_const<T>::type, N> & data ) VULKAN_HPP_NOEXCEPT
+ : m_count( N )
+ , m_ptr( data.data() )
+ {}
+
+ template <size_t N>
+ ArrayProxyNoTemporaries( std::array<T, N> const && data ) VULKAN_HPP_NOEXCEPT = delete;
+ template <size_t N>
+ ArrayProxyNoTemporaries( std::array<T, N> && data ) VULKAN_HPP_NOEXCEPT = delete;
+
template <class Allocator = std::allocator<typename std::remove_const<T>::type>>
- ArrayProxy(std::vector<typename std::remove_const<T>::type, Allocator> & data) VULKAN_HPP_NOEXCEPT
- : m_count(static_cast<uint32_t>(data.size()))
- , m_ptr(data.data())
+ ArrayProxyNoTemporaries( std::vector<T, Allocator> const & data ) VULKAN_HPP_NOEXCEPT
+ : m_count( static_cast<uint32_t>( data.size() ) )
+ , m_ptr( data.data() )
+ {}
+
+ template <class Allocator = std::allocator<typename std::remove_const<T>::type>,
+ typename B = T,
+ typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+ ArrayProxyNoTemporaries( std::vector<typename std::remove_const<T>::type, Allocator> const & data ) VULKAN_HPP_NOEXCEPT
+ : m_count( static_cast<uint32_t>( data.size() ) )
+ , m_ptr( data.data() )
{}
template <class Allocator = std::allocator<typename std::remove_const<T>::type>>
- ArrayProxy(std::vector<typename std::remove_const<T>::type, Allocator> const& data) VULKAN_HPP_NOEXCEPT
- : m_count(static_cast<uint32_t>(data.size()))
- , m_ptr(data.data())
+ ArrayProxyNoTemporaries( std::vector<T, Allocator> & data ) VULKAN_HPP_NOEXCEPT
+ : m_count( static_cast<uint32_t>( data.size() ) )
+ , m_ptr( data.data() )
{}
- ArrayProxy(std::initializer_list<typename std::remove_reference<T>::type> const& data) VULKAN_HPP_NOEXCEPT
- : m_count(static_cast<uint32_t>(data.end() - data.begin()))
- , m_ptr(data.begin())
+ template <class Allocator = std::allocator<typename std::remove_const<T>::type>,
+ typename B = T,
+ typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+ ArrayProxyNoTemporaries( std::vector<typename std::remove_const<T>::type, Allocator> & data ) VULKAN_HPP_NOEXCEPT
+ : m_count( static_cast<uint32_t>( data.size() ) )
+ , m_ptr( data.data() )
{}
+ ArrayProxyNoTemporaries( std::vector<T> const && data ) VULKAN_HPP_NOEXCEPT = delete;
+ ArrayProxyNoTemporaries( std::vector<T> && data ) VULKAN_HPP_NOEXCEPT = delete;
+
const T * begin() const VULKAN_HPP_NOEXCEPT
{
return m_ptr;
@@ -229,19 +487,19 @@ namespace VULKAN_HPP_NAMESPACE
const T & front() const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(m_count && m_ptr);
+ VULKAN_HPP_ASSERT( m_count && m_ptr );
return *m_ptr;
}
const T & back() const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(m_count && m_ptr);
- return *(m_ptr + m_count - 1);
+ VULKAN_HPP_ASSERT( m_count && m_ptr );
+ return *( m_ptr + m_count - 1 );
}
bool empty() const VULKAN_HPP_NOEXCEPT
{
- return (m_count == 0);
+ return ( m_count == 0 );
}
uint32_t size() const VULKAN_HPP_NOEXCEPT
@@ -255,20 +513,157 @@ namespace VULKAN_HPP_NAMESPACE
}
private:
- uint32_t m_count;
- T * m_ptr;
+ uint32_t m_count;
+ T * m_ptr;
};
#endif
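
ArrayProxy gains a default constructor and separate overloads for const and non-const arrays, vectors and initializer lists, while the new ArrayProxyNoTemporaries deletes the rvalue overloads so a proxy can never be left pointing at a destroyed temporary. A usage sketch follows; consumeOffsets is a hypothetical stand-in for the many vulkan.hpp functions that take an ArrayProxy parameter.

    // Hedged sketch of the containers an ArrayProxy parameter now accepts.
    #include <vulkan/vulkan.hpp>
    #include <array>
    #include <initializer_list>
    #include <vector>

    static uint32_t consumeOffsets( vk::ArrayProxy<const vk::DeviceSize> offsets )
    {
        return offsets.size();  // ArrayProxy exposes size()/data()/begin()/end()
    }

    void arrayProxyExamples()
    {
        std::vector<vk::DeviceSize>           v{ 0, 256, 512 };
        std::array<vk::DeviceSize, 2>         a{ 0, 128 };
        std::initializer_list<vk::DeviceSize> il{ 0, 1024 };
        vk::DeviceSize                        single = 64;

        consumeOffsets( v );        // std::vector
        consumeOffsets( a );        // std::array
        consumeOffsets( il );       // initializer list (lvalue)
        consumeOffsets( single );   // single element
        consumeOffsets( nullptr );  // empty proxy
        consumeOffsets( {} );       // default-constructed, new in this version
    }
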
+ template <typename T, size_t N>
+ class ArrayWrapper1D : public std::array<T,N>
+ {
+ public:
+ VULKAN_HPP_CONSTEXPR ArrayWrapper1D() VULKAN_HPP_NOEXCEPT
+ : std::array<T, N>()
+ {}
+
+ VULKAN_HPP_CONSTEXPR ArrayWrapper1D(std::array<T,N> const& data) VULKAN_HPP_NOEXCEPT
+ : std::array<T, N>(data)
+ {}
+
+#if defined(_WIN32) && !defined(_WIN64)
+ VULKAN_HPP_CONSTEXPR T const& operator[](int index) const VULKAN_HPP_NOEXCEPT
+ {
+ return std::array<T, N>::operator[](index);
+ }
+
+ VULKAN_HPP_CONSTEXPR T & operator[](int index) VULKAN_HPP_NOEXCEPT
+ {
+ return std::array<T, N>::operator[](index);
+ }
+#endif
+
+ operator T const* () const VULKAN_HPP_NOEXCEPT
+ {
+ return this->data();
+ }
+
+ operator T * () VULKAN_HPP_NOEXCEPT
+ {
+ return this->data();
+ }
+
+ template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
+ operator std::string() const
+ {
+ return std::string( this->data() );
+ }
+
+#if 17 <= VULKAN_HPP_CPP_VERSION
+ template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
+ operator std::string_view() const
+ {
+ return std::string_view( this->data() );
+ }
+#endif
+
+ template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
+ bool operator<( ArrayWrapper1D<char, N> const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return *static_cast<std::array<char, N> const *>( this ) < *static_cast<std::array<char, N> const *>( &rhs );
+ }
+
+ template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
+ bool operator<=( ArrayWrapper1D<char, N> const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return *static_cast<std::array<char, N> const *>( this ) <= *static_cast<std::array<char, N> const *>( &rhs );
+ }
+
+ template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
+ bool operator>( ArrayWrapper1D<char, N> const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return *static_cast<std::array<char, N> const *>( this ) > *static_cast<std::array<char, N> const *>( &rhs );
+ }
+
+ template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
+ bool operator>=( ArrayWrapper1D<char, N> const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return *static_cast<std::array<char, N> const *>( this ) >= *static_cast<std::array<char, N> const *>( &rhs );
+ }
+
+ template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
+ bool operator==( ArrayWrapper1D<char, N> const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return *static_cast<std::array<char, N> const *>( this ) == *static_cast<std::array<char, N> const *>( &rhs );
+ }
+
+ template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
+ bool operator!=( ArrayWrapper1D<char, N> const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return *static_cast<std::array<char, N> const *>( this ) != *static_cast<std::array<char, N> const *>( &rhs );
+ }
+ };
+
+ // specialization of relational operators between std::string and arrays of chars
+ template <size_t N>
+ bool operator<(std::string const& lhs, ArrayWrapper1D<char, N> const& rhs) VULKAN_HPP_NOEXCEPT
+ {
+ return lhs < rhs.data();
+ }
+
+ template <size_t N>
+ bool operator<=(std::string const& lhs, ArrayWrapper1D<char, N> const& rhs) VULKAN_HPP_NOEXCEPT
+ {
+ return lhs <= rhs.data();
+ }
+
+ template <size_t N>
+ bool operator>(std::string const& lhs, ArrayWrapper1D<char, N> const& rhs) VULKAN_HPP_NOEXCEPT
+ {
+ return lhs > rhs.data();
+ }
+
+ template <size_t N>
+ bool operator>=(std::string const& lhs, ArrayWrapper1D<char, N> const& rhs) VULKAN_HPP_NOEXCEPT
+ {
+ return lhs >= rhs.data();
+ }
+
+ template <size_t N>
+ bool operator==(std::string const& lhs, ArrayWrapper1D<char, N> const& rhs) VULKAN_HPP_NOEXCEPT
+ {
+ return lhs == rhs.data();
+ }
+
+ template <size_t N>
+ bool operator!=(std::string const& lhs, ArrayWrapper1D<char, N> const& rhs) VULKAN_HPP_NOEXCEPT
+ {
+ return lhs != rhs.data();
+ }
+
+ template <typename T, size_t N, size_t M>
+ class ArrayWrapper2D : public std::array<ArrayWrapper1D<T,M>,N>
+ {
+ public:
+ VULKAN_HPP_CONSTEXPR ArrayWrapper2D() VULKAN_HPP_NOEXCEPT
+ : std::array<ArrayWrapper1D<T,M>, N>()
+ {}
+
+ VULKAN_HPP_CONSTEXPR ArrayWrapper2D(std::array<std::array<T,M>,N> const& data) VULKAN_HPP_NOEXCEPT
+ : std::array<ArrayWrapper1D<T,M>, N>(*reinterpret_cast<std::array<ArrayWrapper1D<T,M>,N> const*>(&data))
+ {}
+ };
+
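
ArrayWrapper1D and ArrayWrapper2D wrap the fixed-size arrays embedded in the generated structs, and for char arrays they add conversions to std::string/std::string_view plus comparisons against std::string. A hedged sketch of what that enables, assuming PhysicalDeviceProperties::deviceName is such a wrapped char array (as in the structs generated for this header version); the llvmpipe check itself is just an example.

    // Hedged sketch: treating a wrapped fixed-size char array like a string.
    #include <vulkan/vulkan.hpp>
    #include <string>

    static bool isLlvmpipe( vk::PhysicalDevice const & gpu )
    {
        vk::PhysicalDeviceProperties props = gpu.getProperties();
        std::string name = props.deviceName;                    // operator std::string()
        return name.find( "llvmpipe" ) != std::string::npos
            || std::string( "llvmpipe" ) == props.deviceName;   // operator== added above
    }
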
template <typename FlagBitsType> struct FlagTraits
{
enum { allFlags = 0 };
};
- template <typename BitType, typename MaskType = VkFlags>
+ template <typename BitType>
class Flags
{
public:
+ using MaskType = typename std::underlying_type<BitType>::type;
+
// constructors
VULKAN_HPP_CONSTEXPR Flags() VULKAN_HPP_NOEXCEPT
: m_mask(0)
@@ -278,7 +673,7 @@ namespace VULKAN_HPP_NAMESPACE
: m_mask(static_cast<MaskType>(bit))
{}
- VULKAN_HPP_CONSTEXPR Flags(Flags<BitType, MaskType> const& rhs) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR Flags(Flags<BitType> const& rhs) VULKAN_HPP_NOEXCEPT
: m_mask(rhs.m_mask)
{}
@@ -287,35 +682,39 @@ namespace VULKAN_HPP_NAMESPACE
{}
// relational operators
- VULKAN_HPP_CONSTEXPR bool operator<(Flags<BitType, MaskType> const& rhs) const VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>(Flags<BitType> const&) const = default;
+#else
+ VULKAN_HPP_CONSTEXPR bool operator<(Flags<BitType> const& rhs) const VULKAN_HPP_NOEXCEPT
{
return m_mask < rhs.m_mask;
}
- VULKAN_HPP_CONSTEXPR bool operator<=(Flags<BitType, MaskType> const& rhs) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR bool operator<=(Flags<BitType> const& rhs) const VULKAN_HPP_NOEXCEPT
{
return m_mask <= rhs.m_mask;
}
- VULKAN_HPP_CONSTEXPR bool operator>(Flags<BitType, MaskType> const& rhs) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR bool operator>(Flags<BitType> const& rhs) const VULKAN_HPP_NOEXCEPT
{
return m_mask > rhs.m_mask;
}
- VULKAN_HPP_CONSTEXPR bool operator>=(Flags<BitType, MaskType> const& rhs) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR bool operator>=(Flags<BitType> const& rhs) const VULKAN_HPP_NOEXCEPT
{
return m_mask >= rhs.m_mask;
}
- VULKAN_HPP_CONSTEXPR bool operator==(Flags<BitType, MaskType> const& rhs) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR bool operator==(Flags<BitType> const& rhs) const VULKAN_HPP_NOEXCEPT
{
return m_mask == rhs.m_mask;
}
- VULKAN_HPP_CONSTEXPR bool operator!=(Flags<BitType, MaskType> const& rhs) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR bool operator!=(Flags<BitType> const& rhs) const VULKAN_HPP_NOEXCEPT
{
return m_mask != rhs.m_mask;
}
+#endif
// logical operator
VULKAN_HPP_CONSTEXPR bool operator!() const VULKAN_HPP_NOEXCEPT
@@ -324,46 +723,46 @@ namespace VULKAN_HPP_NAMESPACE
}
// bitwise operators
- VULKAN_HPP_CONSTEXPR Flags<BitType, MaskType> operator&(Flags<BitType, MaskType> const& rhs) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR Flags<BitType> operator&(Flags<BitType> const& rhs) const VULKAN_HPP_NOEXCEPT
{
- return Flags<BitType, MaskType>(m_mask & rhs.m_mask);
+ return Flags<BitType>(m_mask & rhs.m_mask);
}
- VULKAN_HPP_CONSTEXPR Flags<BitType, MaskType> operator|(Flags<BitType, MaskType> const& rhs) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR Flags<BitType> operator|(Flags<BitType> const& rhs) const VULKAN_HPP_NOEXCEPT
{
- return Flags<BitType, MaskType>(m_mask | rhs.m_mask);
+ return Flags<BitType>(m_mask | rhs.m_mask);
}
- VULKAN_HPP_CONSTEXPR Flags<BitType, MaskType> operator^(Flags<BitType, MaskType> const& rhs) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR Flags<BitType> operator^(Flags<BitType> const& rhs) const VULKAN_HPP_NOEXCEPT
{
- return Flags<BitType, MaskType>(m_mask ^ rhs.m_mask);
+ return Flags<BitType>(m_mask ^ rhs.m_mask);
}
- VULKAN_HPP_CONSTEXPR Flags<BitType, MaskType> operator~() const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR Flags<BitType> operator~() const VULKAN_HPP_NOEXCEPT
{
- return Flags<BitType, MaskType>(m_mask ^ FlagTraits<BitType>::allFlags);
+ return Flags<BitType>(m_mask ^ FlagTraits<BitType>::allFlags);
}
// assignment operators
- Flags<BitType, MaskType> & operator=(Flags<BitType, MaskType> const& rhs) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 Flags<BitType> & operator=(Flags<BitType> const& rhs) VULKAN_HPP_NOEXCEPT
{
m_mask = rhs.m_mask;
return *this;
}
- Flags<BitType, MaskType> & operator|=(Flags<BitType, MaskType> const& rhs) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 Flags<BitType> & operator|=(Flags<BitType> const& rhs) VULKAN_HPP_NOEXCEPT
{
m_mask |= rhs.m_mask;
return *this;
}
- Flags<BitType, MaskType> & operator&=(Flags<BitType, MaskType> const& rhs) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 Flags<BitType> & operator&=(Flags<BitType> const& rhs) VULKAN_HPP_NOEXCEPT
{
m_mask &= rhs.m_mask;
return *this;
}
- Flags<BitType, MaskType> & operator^=(Flags<BitType, MaskType> const& rhs) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 Flags<BitType> & operator^=(Flags<BitType> const& rhs) VULKAN_HPP_NOEXCEPT
{
m_mask ^= rhs.m_mask;
return *this;
@@ -384,58 +783,60 @@ namespace VULKAN_HPP_NAMESPACE
MaskType m_mask;
};
- // relational operators
- template <typename BitType, typename MaskType = VkFlags>
- VULKAN_HPP_CONSTEXPR bool operator<(BitType bit, Flags<BitType, MaskType> const& flags) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ // relational operators only needed for pre C++20
+ template <typename BitType>
+ VULKAN_HPP_CONSTEXPR bool operator<(BitType bit, Flags<BitType> const& flags) VULKAN_HPP_NOEXCEPT
{
return flags > bit;
}
- template <typename BitType, typename MaskType = VkFlags>
- VULKAN_HPP_CONSTEXPR bool operator<=(BitType bit, Flags<BitType, MaskType> const& flags) VULKAN_HPP_NOEXCEPT
+ template <typename BitType>
+ VULKAN_HPP_CONSTEXPR bool operator<=(BitType bit, Flags<BitType> const& flags) VULKAN_HPP_NOEXCEPT
{
return flags >= bit;
}
- template <typename BitType, typename MaskType = VkFlags>
- VULKAN_HPP_CONSTEXPR bool operator>(BitType bit, Flags<BitType, MaskType> const& flags) VULKAN_HPP_NOEXCEPT
+ template <typename BitType>
+ VULKAN_HPP_CONSTEXPR bool operator>(BitType bit, Flags<BitType> const& flags) VULKAN_HPP_NOEXCEPT
{
return flags < bit;
}
- template <typename BitType, typename MaskType = VkFlags>
- VULKAN_HPP_CONSTEXPR bool operator>=(BitType bit, Flags<BitType, MaskType> const& flags) VULKAN_HPP_NOEXCEPT
+ template <typename BitType>
+ VULKAN_HPP_CONSTEXPR bool operator>=(BitType bit, Flags<BitType> const& flags) VULKAN_HPP_NOEXCEPT
{
return flags <= bit;
}
- template <typename BitType, typename MaskType = VkFlags>
- VULKAN_HPP_CONSTEXPR bool operator==(BitType bit, Flags<BitType, MaskType> const& flags) VULKAN_HPP_NOEXCEPT
+ template <typename BitType>
+ VULKAN_HPP_CONSTEXPR bool operator==(BitType bit, Flags<BitType> const& flags) VULKAN_HPP_NOEXCEPT
{
return flags == bit;
}
- template <typename BitType, typename MaskType = VkFlags>
- VULKAN_HPP_CONSTEXPR bool operator!=(BitType bit, Flags<BitType, MaskType> const& flags) VULKAN_HPP_NOEXCEPT
+ template <typename BitType>
+ VULKAN_HPP_CONSTEXPR bool operator!=(BitType bit, Flags<BitType> const& flags) VULKAN_HPP_NOEXCEPT
{
return flags != bit;
}
+#endif
// bitwise operators
- template <typename BitType, typename MaskType = VkFlags>
- VULKAN_HPP_CONSTEXPR Flags<BitType, MaskType> operator&(BitType bit, Flags<BitType, MaskType> const& flags) VULKAN_HPP_NOEXCEPT
+ template <typename BitType>
+ VULKAN_HPP_CONSTEXPR Flags<BitType> operator&(BitType bit, Flags<BitType> const& flags) VULKAN_HPP_NOEXCEPT
{
return flags & bit;
}
- template <typename BitType, typename MaskType = VkFlags>
- VULKAN_HPP_CONSTEXPR Flags<BitType, MaskType> operator|(BitType bit, Flags<BitType, MaskType> const& flags) VULKAN_HPP_NOEXCEPT
+ template <typename BitType>
+ VULKAN_HPP_CONSTEXPR Flags<BitType> operator|(BitType bit, Flags<BitType> const& flags) VULKAN_HPP_NOEXCEPT
{
return flags | bit;
}
- template <typename BitType, typename MaskType = VkFlags>
- VULKAN_HPP_CONSTEXPR Flags<BitType, MaskType> operator^(BitType bit, Flags<BitType, MaskType> const& flags) VULKAN_HPP_NOEXCEPT
+ template <typename BitType>
+ VULKAN_HPP_CONSTEXPR Flags<BitType> operator^(BitType bit, Flags<BitType> const& flags) VULKAN_HPP_NOEXCEPT
{
return flags ^ bit;
}
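
Flags loses its second template parameter: the mask type is now derived from the enum's underlying type, and the hand-written relational operators give way to a defaulted operator<=> when the spaceship operator is available. Day-to-day flag handling is unchanged, as this short sketch shows.

    // Hedged sketch: flag composition and testing with the slimmed-down Flags template.
    #include <vulkan/vulkan.hpp>

    void flagsExample()
    {
        // Combining two bits still yields the corresponding Flags type, constexpr as before.
        constexpr vk::BufferUsageFlags usage =
            vk::BufferUsageFlagBits::eTransferSrc | vk::BufferUsageFlagBits::eVertexBuffer;

        // Testing a bit reads the same; the mask is now the enum's underlying type.
        if ( usage & vk::BufferUsageFlagBits::eVertexBuffer )
        {
            // vertex-buffer usage requested
        }
    }
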
@@ -456,181 +857,227 @@ namespace VULKAN_HPP_NAMESPACE
RefType *m_ptr;
};
- template <typename X, typename Y> struct isStructureChainValid { enum { value = false }; };
+ template <typename X, typename Y> struct StructExtends { enum { value = false }; };
- template <typename P, typename T>
- struct TypeList
- {
- using list = P;
- using last = T;
- };
-
- template <typename List, typename X>
- struct extendCheck
+ template<typename Type, class...>
+ struct IsPartOfStructureChain
{
- static const bool valid = isStructureChainValid<typename List::last, X>::value || extendCheck<typename List::list,X>::valid;
+ static const bool valid = false;
};
- template <typename T, typename X>
- struct extendCheck<TypeList<void,T>,X>
+ template<typename Type, typename Head, typename... Tail>
+ struct IsPartOfStructureChain<Type, Head, Tail...>
{
- static const bool valid = isStructureChainValid<T, X>::value;
+ static const bool valid = std::is_same<Type, Head>::value || IsPartOfStructureChain<Type, Tail...>::valid;
};
- template <typename X>
- struct extendCheck<void,X>
+ template <size_t Index, typename T, typename... ChainElements>
+ struct StructureChainContains
{
- static const bool valid = true;
+ static const bool value = std::is_same<T, typename std::tuple_element<Index, std::tuple<ChainElements...>>::type>::value ||
+ StructureChainContains<Index - 1, T, ChainElements...>::value;
};
- template<typename Type, class...>
- struct isPartOfStructureChain
+ template <typename T, typename... ChainElements>
+ struct StructureChainContains<0, T, ChainElements...>
{
- static const bool valid = false;
+ static const bool value = std::is_same<T, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value;
};
- template<typename Type, typename Head, typename... Tail>
- struct isPartOfStructureChain<Type, Head, Tail...>
+ template <size_t Index, typename... ChainElements>
+ struct StructureChainValidation
{
- static const bool valid = std::is_same<Type, Head>::value || isPartOfStructureChain<Type, Tail...>::valid;
+ using TestType = typename std::tuple_element<Index, std::tuple<ChainElements...>>::type;
+ static const bool valid =
+ StructExtends<TestType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value &&
+ ( TestType::allowDuplicate || !StructureChainContains<Index - 1, TestType, ChainElements...>::value ) &&
+ StructureChainValidation<Index - 1, ChainElements...>::valid;
};
- template <class Element>
- class StructureChainElement
+ template <typename... ChainElements>
+ struct StructureChainValidation<0, ChainElements...>
{
- public:
- explicit operator Element&() VULKAN_HPP_NOEXCEPT { return value; }
- explicit operator const Element&() const VULKAN_HPP_NOEXCEPT { return value; }
- private:
- Element value;
+ static const bool valid = true;
};
- template<typename ...StructureElements>
- class StructureChain : private StructureChainElement<StructureElements>...
+ template <typename... ChainElements>
+ class StructureChain : public std::tuple<ChainElements...>
{
public:
StructureChain() VULKAN_HPP_NOEXCEPT
{
- link<void, StructureElements...>();
+ static_assert( StructureChainValidation<sizeof...( ChainElements ) - 1, ChainElements...>::valid,
+ "The structure chain is not valid!" );
+ link<sizeof...( ChainElements ) - 1>();
}
- StructureChain(StructureChain const &rhs) VULKAN_HPP_NOEXCEPT
+ StructureChain( StructureChain const & rhs ) VULKAN_HPP_NOEXCEPT : std::tuple<ChainElements...>( rhs )
{
- linkAndCopy<void, StructureElements...>(rhs);
+ static_assert( StructureChainValidation<sizeof...( ChainElements ) - 1, ChainElements...>::valid,
+ "The structure chain is not valid!" );
+ link<sizeof...( ChainElements ) - 1>();
}
- StructureChain(StructureElements const &... elems) VULKAN_HPP_NOEXCEPT
+ StructureChain( StructureChain && rhs ) VULKAN_HPP_NOEXCEPT
+ : std::tuple<ChainElements...>( std::forward<std::tuple<ChainElements...>>( rhs ) )
{
- linkAndCopyElements<void, StructureElements...>(elems...);
+ static_assert( StructureChainValidation<sizeof...( ChainElements ) - 1, ChainElements...>::valid,
+ "The structure chain is not valid!" );
+ link<sizeof...( ChainElements ) - 1>();
}
- StructureChain& operator=(StructureChain const &rhs) VULKAN_HPP_NOEXCEPT
+ StructureChain( ChainElements const &... elems ) VULKAN_HPP_NOEXCEPT : std::tuple<ChainElements...>( elems... )
{
- linkAndCopy<void, StructureElements...>(rhs);
+ static_assert( StructureChainValidation<sizeof...( ChainElements ) - 1, ChainElements...>::valid,
+ "The structure chain is not valid!" );
+ link<sizeof...( ChainElements ) - 1>();
+ }
+
+ StructureChain & operator=( StructureChain const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::tuple<ChainElements...>::operator=( rhs );
+ link<sizeof...( ChainElements ) - 1>();
return *this;
}
- template<typename ClassType> ClassType& get() VULKAN_HPP_NOEXCEPT { return static_cast<ClassType&>(*this);}
+ StructureChain & operator=( StructureChain && rhs ) = delete;
- template<typename ClassTypeA, typename ClassTypeB, typename ...ClassTypes>
- std::tuple<ClassTypeA, ClassTypeB, ClassTypes...> get()
+ template <typename T, size_t Which = 0>
+ T & get() VULKAN_HPP_NOEXCEPT
{
- return std::tuple_cat(
- std::make_tuple(get<ClassTypeA>(),get<ClassTypeB>()),
- std::make_tuple(get<ClassTypes>()...)
- );
+ return std::get<ChainElementIndex<0, T, Which, void, ChainElements...>::value>( *this );
}
- template<typename ClassType>
- void unlink() VULKAN_HPP_NOEXCEPT
+ template <typename T, size_t Which = 0>
+ T const & get() const VULKAN_HPP_NOEXCEPT
{
- static_assert(isPartOfStructureChain<ClassType, StructureElements...>::valid, "Can't unlink Structure that's not part of this StructureChain!");
- static_assert(!std::is_same<ClassType, typename std::tuple_element<0, std::tuple<StructureElements...>>::type>::value, "It's not allowed to unlink the first element!");
- VkBaseOutStructure * ptr = reinterpret_cast<VkBaseOutStructure*>(&get<ClassType>());
- assert(ptr != nullptr);
- VkBaseOutStructure ** ppNext = &(reinterpret_cast<VkBaseOutStructure*>(this)->pNext);
- assert(*ppNext != nullptr);
- while (*ppNext != ptr)
- {
- ppNext = &(*ppNext)->pNext;
- assert(*ppNext != nullptr); // fires, if the ClassType member has already been unlinked !
- }
- assert(*ppNext == ptr);
- *ppNext = (*ppNext)->pNext;
+ return std::get<ChainElementIndex<0, T, Which, void, ChainElements...>::value>( *this );
}
- template <typename ClassType>
- void relink() VULKAN_HPP_NOEXCEPT
+ template <typename T0, typename T1, typename... Ts>
+ std::tuple<T0 &, T1 &, Ts &...> get() VULKAN_HPP_NOEXCEPT
{
- static_assert(isPartOfStructureChain<ClassType, StructureElements...>::valid, "Can't relink Structure that's not part of this StructureChain!");
- static_assert(!std::is_same<ClassType, typename std::tuple_element<0, std::tuple<StructureElements...>>::type>::value, "It's not allowed to have the first element unlinked!");
- VkBaseOutStructure * ptr = reinterpret_cast<VkBaseOutStructure*>(&get<ClassType>());
- assert(ptr != nullptr);
- VkBaseOutStructure ** ppNext = &(reinterpret_cast<VkBaseOutStructure*>(this)->pNext);
- assert(*ppNext != nullptr);
-#if !defined(NDEBUG)
- while (*ppNext)
- {
- assert(*ppNext != ptr); // fires, if the ClassType member has not been unlinked before
- ppNext = &(*ppNext)->pNext;
- }
- ppNext = &(reinterpret_cast<VkBaseOutStructure*>(this)->pNext);
-#endif
- ptr->pNext = *ppNext;
- *ppNext = ptr;
+ return std::tie( get<T0>(), get<T1>(), get<Ts>()... );
}
- private:
- template<typename List, typename X>
- void link() VULKAN_HPP_NOEXCEPT
+ template <typename T0, typename T1, typename... Ts>
+ std::tuple<T0 const &, T1 const &, Ts const &...> get() const VULKAN_HPP_NOEXCEPT
{
- static_assert(extendCheck<List, X>::valid, "The structure chain is not valid!");
+ return std::tie( get<T0>(), get<T1>(), get<Ts>()... );
}
- template<typename List, typename X, typename Y, typename ...Z>
- void link() VULKAN_HPP_NOEXCEPT
+ template <typename ClassType, size_t Which = 0>
+ void relink() VULKAN_HPP_NOEXCEPT
{
- static_assert(extendCheck<List,X>::valid, "The structure chain is not valid!");
- X& x = static_cast<X&>(*this);
- Y& y = static_cast<Y&>(*this);
- x.pNext = &y;
- link<TypeList<List, X>, Y, Z...>();
+ static_assert( IsPartOfStructureChain<ClassType, ChainElements...>::valid,
+ "Can't relink Structure that's not part of this StructureChain!" );
+ static_assert(
+ !std::is_same<ClassType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value || (Which != 0),
+ "It's not allowed to have the first element unlinked!" );
+
+ auto pNext = reinterpret_cast<VkBaseInStructure *>( &get<ClassType, Which>() );
+ VULKAN_HPP_ASSERT( !isLinked( pNext ) );
+ auto & headElement = std::get<0>( *this );
+ pNext->pNext = reinterpret_cast<VkBaseInStructure const*>(headElement.pNext);
+ headElement.pNext = pNext;
}
- template<typename List, typename X>
- void linkAndCopy(StructureChain const &rhs) VULKAN_HPP_NOEXCEPT
+ template <typename ClassType, size_t Which = 0>
+ void unlink() VULKAN_HPP_NOEXCEPT
{
- static_assert(extendCheck<List, X>::valid, "The structure chain is not valid!");
- static_cast<X&>(*this) = static_cast<X const &>(rhs);
+ static_assert( IsPartOfStructureChain<ClassType, ChainElements...>::valid,
+ "Can't unlink Structure that's not part of this StructureChain!" );
+ static_assert(
+ !std::is_same<ClassType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value || (Which != 0),
+ "It's not allowed to unlink the first element!" );
+
+ unlink<sizeof...( ChainElements ) - 1>( reinterpret_cast<VkBaseOutStructure const *>( &get<ClassType, Which>() ) );
}
- template<typename List, typename X, typename Y, typename ...Z>
- void linkAndCopy(StructureChain const &rhs) VULKAN_HPP_NOEXCEPT
+ private:
+ template <int Index, typename T, int Which, typename, class First, class... Types>
+ struct ChainElementIndex : ChainElementIndex<Index + 1, T, Which, void, Types...>
+ {};
+
+ template <int Index, typename T, int Which, class First, class... Types>
+ struct ChainElementIndex<Index,
+ T,
+ Which,
+ typename std::enable_if<!std::is_same<T, First>::value, void>::type,
+ First,
+ Types...> : ChainElementIndex<Index + 1, T, Which, void, Types...>
+ {};
+
+ template <int Index, typename T, int Which, class First, class... Types>
+ struct ChainElementIndex<Index,
+ T,
+ Which,
+ typename std::enable_if<std::is_same<T, First>::value, void>::type,
+ First,
+ Types...> : ChainElementIndex<Index + 1, T, Which - 1, void, Types...>
+ {};
+
+ template <int Index, typename T, class First, class... Types>
+ struct ChainElementIndex<Index,
+ T,
+ 0,
+ typename std::enable_if<std::is_same<T, First>::value, void>::type,
+ First,
+ Types...> : std::integral_constant<int, Index>
+ {};
+
+ bool isLinked( VkBaseInStructure const * pNext )
+ {
+ VkBaseInStructure const * elementPtr = reinterpret_cast<VkBaseInStructure const*>(&std::get<0>( *this ));
+ while ( elementPtr )
+ {
+ if ( elementPtr->pNext == pNext )
+ {
+ return true;
+ }
+ elementPtr = elementPtr->pNext;
+ }
+ return false;
+ }
+
+ template <size_t Index>
+ typename std::enable_if<Index != 0, void>::type link() VULKAN_HPP_NOEXCEPT
{
- static_assert(extendCheck<List, X>::valid, "The structure chain is not valid!");
- X& x = static_cast<X&>(*this);
- Y& y = static_cast<Y&>(*this);
- x = static_cast<X const &>(rhs);
- x.pNext = &y;
- linkAndCopy<TypeList<List, X>, Y, Z...>(rhs);
+ auto & x = std::get<Index - 1>( *this );
+ x.pNext = &std::get<Index>( *this );
+ link<Index - 1>();
}
- template<typename List, typename X>
- void linkAndCopyElements(X const &xelem) VULKAN_HPP_NOEXCEPT
+ template <size_t Index>
+ typename std::enable_if<Index == 0, void>::type link() VULKAN_HPP_NOEXCEPT
+ {}
+
+ template <size_t Index>
+ typename std::enable_if<Index != 0, void>::type unlink( VkBaseOutStructure const * pNext ) VULKAN_HPP_NOEXCEPT
{
- static_assert(extendCheck<List, X>::valid, "The structure chain is not valid!");
- static_cast<X&>(*this) = xelem;
+ auto & element = std::get<Index>( *this );
+ if ( element.pNext == pNext )
+ {
+ element.pNext = pNext->pNext;
+ }
+ else
+ {
+ unlink<Index - 1>( pNext );
+ }
}
- template<typename List, typename X, typename Y, typename ...Z>
- void linkAndCopyElements(X const &xelem, Y const &yelem, Z const &... zelem) VULKAN_HPP_NOEXCEPT
+ template <size_t Index>
+ typename std::enable_if<Index == 0, void>::type unlink( VkBaseOutStructure const * pNext ) VULKAN_HPP_NOEXCEPT
{
- static_assert(extendCheck<List, X>::valid, "The structure chain is not valid!");
- X& x = static_cast<X&>(*this);
- Y& y = static_cast<Y&>(*this);
- x = xelem;
- x.pNext = &y;
- linkAndCopyElements<TypeList<List, X>, Y, Z...>(yelem, zelem...);
+ auto & element = std::get<0>( *this );
+ if ( element.pNext == pNext )
+ {
+ element.pNext = pNext->pNext;
+ }
+ else
+ {
+ VULKAN_HPP_ASSERT( false ); // fires, if the ClassType member has already been unlinked !
+ }
}
};
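
StructureChain is rebuilt on top of std::tuple: chain validity is checked with StructExtends at compile time, get<T>() can return several links at once, and unlink()/relink() now patch the pNext pointers at run time instead of relying on template bookkeeping. A hedged usage sketch; getProperties2 and the two property structs are regular vulkan.hpp names, while the surrounding function is illustrative.

    // Hedged sketch: querying properties through the tuple-based StructureChain.
    #include <vulkan/vulkan.hpp>

    void structureChainExample( vk::PhysicalDevice gpu )
    {
        vk::StructureChain<vk::PhysicalDeviceProperties2,
                           vk::PhysicalDeviceDriverProperties> chain;

        // The chain is pre-linked, so the driver fills both structures at once.
        gpu.getProperties2( &chain.get<vk::PhysicalDeviceProperties2>() );

        // New: fetch several links as a tuple of references (C++17 structured bindings).
        auto [props, driver] = chain.get<vk::PhysicalDeviceProperties2,
                                         vk::PhysicalDeviceDriverProperties>();

        // pNext links other than the head can be detached and re-attached at run time.
        chain.unlink<vk::PhysicalDeviceDriverProperties>();
        chain.relink<vk::PhysicalDeviceDriverProperties>();

        (void)props; (void)driver;
    }
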
@@ -757,24 +1204,53 @@ namespace VULKAN_HPP_NAMESPACE
class DispatchLoaderStatic
{
public:
- VkResult vkCreateInstance( const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkInstance* pInstance ) const VULKAN_HPP_NOEXCEPT
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ VkResult vkAcquireFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCreateInstance( pCreateInfo, pAllocator, pInstance );
+ return ::vkAcquireFullScreenExclusiveModeEXT( device, swapchain );
}
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- VkResult vkEnumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkAcquireNextImage2KHR( VkDevice device, const VkAcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, pProperties );
+ return ::vkAcquireNextImage2KHR( device, pAcquireInfo, pImageIndex );
}
- VkResult vkEnumerateInstanceLayerProperties( uint32_t* pPropertyCount, VkLayerProperties* pProperties ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkAcquireNextImageKHR( VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t* pImageIndex ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkEnumerateInstanceLayerProperties( pPropertyCount, pProperties );
+ return ::vkAcquireNextImageKHR( device, swapchain, timeout, semaphore, fence, pImageIndex );
}
- VkResult vkEnumerateInstanceVersion( uint32_t* pApiVersion ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkAcquirePerformanceConfigurationINTEL( VkDevice device, const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VkPerformanceConfigurationINTEL* pConfiguration ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkEnumerateInstanceVersion( pApiVersion );
+ return ::vkAcquirePerformanceConfigurationINTEL( device, pAcquireInfo, pConfiguration );
+ }
+
+ VkResult vkAcquireProfilingLockKHR( VkDevice device, const VkAcquireProfilingLockInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkAcquireProfilingLockKHR( device, pInfo );
+ }
+
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+ VkResult vkAcquireXlibDisplayEXT( VkPhysicalDevice physicalDevice, Display* dpy, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkAcquireXlibDisplayEXT( physicalDevice, dpy, display );
+ }
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+
+ VkResult vkAllocateCommandBuffers( VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkAllocateCommandBuffers( device, pAllocateInfo, pCommandBuffers );
+ }
+
+ VkResult vkAllocateDescriptorSets( VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkAllocateDescriptorSets( device, pAllocateInfo, pDescriptorSets );
+ }
+
+ VkResult vkAllocateMemory( VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkAllocateMemory( device, pAllocateInfo, pAllocator, pMemory );
}
VkResult vkBeginCommandBuffer( VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo* pBeginInfo ) const VULKAN_HPP_NOEXCEPT
@@ -782,6 +1258,55 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkBeginCommandBuffer( commandBuffer, pBeginInfo );
}
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ VkResult vkBindAccelerationStructureMemoryKHR( VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoKHR* pBindInfos ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkBindAccelerationStructureMemoryKHR( device, bindInfoCount, pBindInfos );
+ }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+ VkResult vkBindAccelerationStructureMemoryNV( VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoKHR* pBindInfos ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkBindAccelerationStructureMemoryNV( device, bindInfoCount, pBindInfos );
+ }
+
+ VkResult vkBindBufferMemory( VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkBindBufferMemory( device, buffer, memory, memoryOffset );
+ }
+
+ VkResult vkBindBufferMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkBindBufferMemory2( device, bindInfoCount, pBindInfos );
+ }
+
+ VkResult vkBindBufferMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkBindBufferMemory2KHR( device, bindInfoCount, pBindInfos );
+ }
+
+ VkResult vkBindImageMemory( VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkBindImageMemory( device, image, memory, memoryOffset );
+ }
+
+ VkResult vkBindImageMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkBindImageMemory2( device, bindInfoCount, pBindInfos );
+ }
+
+ VkResult vkBindImageMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkBindImageMemory2KHR( device, bindInfoCount, pBindInfos );
+ }
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ VkResult vkBuildAccelerationStructureKHR( VkDevice device, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkAccelerationStructureBuildOffsetInfoKHR* const * ppOffsetInfos ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkBuildAccelerationStructureKHR( device, infoCount, pInfos, ppOffsetInfos );
+ }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
void vkCmdBeginConditionalRenderingEXT( VkCommandBuffer commandBuffer, const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdBeginConditionalRenderingEXT( commandBuffer, pConditionalRenderingBegin );
@@ -837,6 +1362,11 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdBindPipeline( commandBuffer, pipelineBindPoint, pipeline );
}
+ void vkCmdBindPipelineShaderGroupNV( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline, uint32_t groupIndex ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdBindPipelineShaderGroupNV( commandBuffer, pipelineBindPoint, pipeline, groupIndex );
+ }
+
void vkCmdBindShadingRateImageNV( VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdBindShadingRateImageNV( commandBuffer, imageView, imageLayout );
@@ -852,12 +1382,36 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdBindVertexBuffers( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets );
}
+ void vkCmdBindVertexBuffers2EXT( VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes, const VkDeviceSize* pStrides ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdBindVertexBuffers2EXT( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides );
+ }
+
void vkCmdBlitImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdBlitImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter );
}
- void vkCmdBuildAccelerationStructureNV( VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdBlitImage2KHR( VkCommandBuffer commandBuffer, const VkBlitImageInfo2KHR* pBlitImageInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdBlitImage2KHR( commandBuffer, pBlitImageInfo );
+ }
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ void vkCmdBuildAccelerationStructureIndirectKHR( VkCommandBuffer commandBuffer, const VkAccelerationStructureBuildGeometryInfoKHR* pInfo, VkBuffer indirectBuffer, VkDeviceSize indirectOffset, uint32_t indirectStride ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdBuildAccelerationStructureIndirectKHR( commandBuffer, pInfo, indirectBuffer, indirectOffset, indirectStride );
+ }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ void vkCmdBuildAccelerationStructureKHR( VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkAccelerationStructureBuildOffsetInfoKHR* const * ppOffsetInfos ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdBuildAccelerationStructureKHR( commandBuffer, infoCount, pInfos, ppOffsetInfos );
+ }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+ void vkCmdBuildAccelerationStructureNV( VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, VkAccelerationStructureKHR dst, VkAccelerationStructureKHR src, VkBuffer scratch, VkDeviceSize scratchOffset ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdBuildAccelerationStructureNV( commandBuffer, pInfo, instanceData, instanceOffset, update, dst, src, scratch, scratchOffset );
}
@@ -877,31 +1431,72 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdClearDepthStencilImage( commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges );
}
- void vkCmdCopyAccelerationStructureNV( VkCommandBuffer commandBuffer, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkCopyAccelerationStructureModeNV mode ) const VULKAN_HPP_NOEXCEPT
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ void vkCmdCopyAccelerationStructureKHR( VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdCopyAccelerationStructureKHR( commandBuffer, pInfo );
+ }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+ void vkCmdCopyAccelerationStructureNV( VkCommandBuffer commandBuffer, VkAccelerationStructureKHR dst, VkAccelerationStructureKHR src, VkCopyAccelerationStructureModeKHR mode ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdCopyAccelerationStructureNV( commandBuffer, dst, src, mode );
}
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ void vkCmdCopyAccelerationStructureToMemoryKHR( VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdCopyAccelerationStructureToMemoryKHR( commandBuffer, pInfo );
+ }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
void vkCmdCopyBuffer( VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy* pRegions ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdCopyBuffer( commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions );
}
+ void vkCmdCopyBuffer2KHR( VkCommandBuffer commandBuffer, const VkCopyBufferInfo2KHR* pCopyBufferInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdCopyBuffer2KHR( commandBuffer, pCopyBufferInfo );
+ }
+
void vkCmdCopyBufferToImage( VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdCopyBufferToImage( commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions );
}
+ void vkCmdCopyBufferToImage2KHR( VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2KHR* pCopyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdCopyBufferToImage2KHR( commandBuffer, pCopyBufferToImageInfo );
+ }
+
void vkCmdCopyImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy* pRegions ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdCopyImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions );
}
+ void vkCmdCopyImage2KHR( VkCommandBuffer commandBuffer, const VkCopyImageInfo2KHR* pCopyImageInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdCopyImage2KHR( commandBuffer, pCopyImageInfo );
+ }
+
void vkCmdCopyImageToBuffer( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdCopyImageToBuffer( commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions );
}
+ void vkCmdCopyImageToBuffer2KHR( VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2KHR* pCopyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdCopyImageToBuffer2KHR( commandBuffer, pCopyImageToBufferInfo );
+ }
+
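The new *2KHR copy commands added here take a single extensible struct instead of the separate buffer/image/region parameters used by the original commands. A small sketch of the buffer-to-buffer case, with field names following VK_KHR_copy_commands2 (treat them as assumptions if the bundled header differs) and a command buffer assumed to be in the recording state:

#include <vulkan/vulkan.h>

// Sketch only: records a single-region copy through the same entry point the
// vkCmdCopyBuffer2KHR wrapper above forwards to.
void recordBufferCopy2( VkCommandBuffer cmd, VkBuffer src, VkBuffer dst, VkDeviceSize size )
{
  VkBufferCopy2KHR region = {};
  region.sType     = VK_STRUCTURE_TYPE_BUFFER_COPY_2_KHR;
  region.srcOffset = 0;
  region.dstOffset = 0;
  region.size      = size;

  VkCopyBufferInfo2KHR copyInfo = {};
  copyInfo.sType       = VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2_KHR;
  copyInfo.srcBuffer   = src;
  copyInfo.dstBuffer   = dst;
  copyInfo.regionCount = 1;
  copyInfo.pRegions    = &region;

  ::vkCmdCopyBuffer2KHR( cmd, &copyInfo );
}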
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ void vkCmdCopyMemoryToAccelerationStructureKHR( VkCommandBuffer commandBuffer, const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdCopyMemoryToAccelerationStructureKHR( commandBuffer, pInfo );
+ }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
void vkCmdCopyQueryPoolResults( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdCopyQueryPoolResults( commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags );
@@ -1057,6 +1652,11 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdExecuteCommands( commandBuffer, commandBufferCount, pCommandBuffers );
}
+ void vkCmdExecuteGeneratedCommandsNV( VkCommandBuffer commandBuffer, VkBool32 isPreprocessed, const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdExecuteGeneratedCommandsNV( commandBuffer, isPreprocessed, pGeneratedCommandsInfo );
+ }
+
void vkCmdFillBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdFillBuffer( commandBuffer, dstBuffer, dstOffset, size, data );
@@ -1087,9 +1687,9 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdPipelineBarrier( commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers );
}
- void vkCmdProcessCommandsNVX( VkCommandBuffer commandBuffer, const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdPreprocessGeneratedCommandsNV( VkCommandBuffer commandBuffer, const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCmdProcessCommandsNVX( commandBuffer, pProcessCommandsInfo );
+ return ::vkCmdPreprocessGeneratedCommandsNV( commandBuffer, pGeneratedCommandsInfo );
}
void vkCmdPushConstants( VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues ) const VULKAN_HPP_NOEXCEPT
@@ -1107,11 +1707,6 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdPushDescriptorSetWithTemplateKHR( commandBuffer, descriptorUpdateTemplate, layout, set, pData );
}
- void vkCmdReserveSpaceForCommandsNVX( VkCommandBuffer commandBuffer, const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo ) const VULKAN_HPP_NOEXCEPT
- {
- return ::vkCmdReserveSpaceForCommandsNVX( commandBuffer, pReserveSpaceInfo );
- }
-
void vkCmdResetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdResetEvent( commandBuffer, event, stageMask );
@@ -1127,6 +1722,11 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdResolveImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions );
}
+ void vkCmdResolveImage2KHR( VkCommandBuffer commandBuffer, const VkResolveImageInfo2KHR* pResolveImageInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdResolveImage2KHR( commandBuffer, pResolveImageInfo );
+ }
+
void vkCmdSetBlendConstants( VkCommandBuffer commandBuffer, const float blendConstants[4] ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetBlendConstants( commandBuffer, blendConstants );
@@ -1142,6 +1742,11 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdSetCoarseSampleOrderNV( commandBuffer, sampleOrderType, customSampleOrderCount, pCustomSampleOrders );
}
+ void vkCmdSetCullModeEXT( VkCommandBuffer commandBuffer, VkCullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetCullModeEXT( commandBuffer, cullMode );
+ }
+
void vkCmdSetDepthBias( VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetDepthBias( commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
@@ -1152,6 +1757,26 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdSetDepthBounds( commandBuffer, minDepthBounds, maxDepthBounds );
}
+ void vkCmdSetDepthBoundsTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetDepthBoundsTestEnableEXT( commandBuffer, depthBoundsTestEnable );
+ }
+
+ void vkCmdSetDepthCompareOpEXT( VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetDepthCompareOpEXT( commandBuffer, depthCompareOp );
+ }
+
+ void vkCmdSetDepthTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetDepthTestEnableEXT( commandBuffer, depthTestEnable );
+ }
+
+ void vkCmdSetDepthWriteEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetDepthWriteEnableEXT( commandBuffer, depthWriteEnable );
+ }
+
void vkCmdSetDeviceMask( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetDeviceMask( commandBuffer, deviceMask );
@@ -1177,6 +1802,11 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdSetExclusiveScissorNV( commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissors );
}
+ void vkCmdSetFrontFaceEXT( VkCommandBuffer commandBuffer, VkFrontFace frontFace ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetFrontFaceEXT( commandBuffer, frontFace );
+ }
+
void vkCmdSetLineStippleEXT( VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetLineStippleEXT( commandBuffer, lineStippleFactor, lineStipplePattern );
@@ -1202,6 +1832,11 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdSetPerformanceStreamMarkerINTEL( commandBuffer, pMarkerInfo );
}
+ void vkCmdSetPrimitiveTopologyEXT( VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetPrimitiveTopologyEXT( commandBuffer, primitiveTopology );
+ }
+
void vkCmdSetSampleLocationsEXT( VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT* pSampleLocationsInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetSampleLocationsEXT( commandBuffer, pSampleLocationsInfo );
@@ -1212,16 +1847,31 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdSetScissor( commandBuffer, firstScissor, scissorCount, pScissors );
}
+ void vkCmdSetScissorWithCountEXT( VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D* pScissors ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetScissorWithCountEXT( commandBuffer, scissorCount, pScissors );
+ }
+
void vkCmdSetStencilCompareMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetStencilCompareMask( commandBuffer, faceMask, compareMask );
}
+ void vkCmdSetStencilOpEXT( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp, VkCompareOp compareOp ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetStencilOpEXT( commandBuffer, faceMask, failOp, passOp, depthFailOp, compareOp );
+ }
+
void vkCmdSetStencilReference( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetStencilReference( commandBuffer, faceMask, reference );
}
+ void vkCmdSetStencilTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetStencilTestEnableEXT( commandBuffer, stencilTestEnable );
+ }
+
void vkCmdSetStencilWriteMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetStencilWriteMask( commandBuffer, faceMask, writeMask );
@@ -1242,6 +1892,25 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdSetViewportWScalingNV( commandBuffer, firstViewport, viewportCount, pViewportWScalings );
}
+ void vkCmdSetViewportWithCountEXT( VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport* pViewports ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetViewportWithCountEXT( commandBuffer, viewportCount, pViewports );
+ }
+
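The vkCmdSet*EXT wrappers above expose VK_EXT_extended_dynamic_state, which lets state that used to be baked into the pipeline be set at record time. A sketch of recording a few of them, assuming the bound pipeline declared the matching dynamic states and that the extension entry points resolve the same way they do for the other wrappers in this file:

#include <vulkan/vulkan.h>

// Sketch only: sets rasterization, depth and viewport state dynamically.
void recordDynamicState( VkCommandBuffer cmd, VkViewport viewport, VkRect2D scissor )
{
  ::vkCmdSetCullModeEXT( cmd, VK_CULL_MODE_BACK_BIT );
  ::vkCmdSetFrontFaceEXT( cmd, VK_FRONT_FACE_COUNTER_CLOCKWISE );
  ::vkCmdSetPrimitiveTopologyEXT( cmd, VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST );
  ::vkCmdSetDepthTestEnableEXT( cmd, VK_TRUE );
  ::vkCmdSetDepthWriteEnableEXT( cmd, VK_TRUE );
  ::vkCmdSetDepthCompareOpEXT( cmd, VK_COMPARE_OP_LESS_OR_EQUAL );
  ::vkCmdSetViewportWithCountEXT( cmd, 1, &viewport );
  ::vkCmdSetScissorWithCountEXT( cmd, 1, &scissor );
}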
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ void vkCmdTraceRaysIndirectKHR( VkCommandBuffer commandBuffer, const VkStridedBufferRegionKHR* pRaygenShaderBindingTable, const VkStridedBufferRegionKHR* pMissShaderBindingTable, const VkStridedBufferRegionKHR* pHitShaderBindingTable, const VkStridedBufferRegionKHR* pCallableShaderBindingTable, VkBuffer buffer, VkDeviceSize offset ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdTraceRaysIndirectKHR( commandBuffer, pRaygenShaderBindingTable, pMissShaderBindingTable, pHitShaderBindingTable, pCallableShaderBindingTable, buffer, offset );
+ }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ void vkCmdTraceRaysKHR( VkCommandBuffer commandBuffer, const VkStridedBufferRegionKHR* pRaygenShaderBindingTable, const VkStridedBufferRegionKHR* pMissShaderBindingTable, const VkStridedBufferRegionKHR* pHitShaderBindingTable, const VkStridedBufferRegionKHR* pCallableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdTraceRaysKHR( commandBuffer, pRaygenShaderBindingTable, pMissShaderBindingTable, pHitShaderBindingTable, pCallableShaderBindingTable, width, height, depth );
+ }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
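Unlike vkCmdTraceRaysNV just below, which spells out buffer/offset/stride for every shader binding table, the provisional KHR call groups each table into a VkStridedBufferRegionKHR. A sketch under that assumption, with the regions presumed to be filled in elsewhere from the pipeline's shader group handles and VK_ENABLE_BETA_EXTENSIONS defined before the headers:

#include <vulkan/vulkan.h>

#ifdef VK_ENABLE_BETA_EXTENSIONS
// Sketch only: dispatches a width x height x 1 ray trace through the same
// provisional entry point the wrapper above forwards to.
void traceRays( VkCommandBuffer cmd,
                const VkStridedBufferRegionKHR& raygen,
                const VkStridedBufferRegionKHR& miss,
                const VkStridedBufferRegionKHR& hit,
                const VkStridedBufferRegionKHR& callable,
                uint32_t width, uint32_t height )
{
  ::vkCmdTraceRaysKHR( cmd, &raygen, &miss, &hit, &callable, width, height, 1 );
}
#endif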
void vkCmdTraceRaysNV( VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer, VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer, VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride, VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset, VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer, VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdTraceRaysNV( commandBuffer, raygenShaderBindingTableBuffer, raygenShaderBindingOffset, missShaderBindingTableBuffer, missShaderBindingOffset, missShaderBindingStride, hitShaderBindingTableBuffer, hitShaderBindingOffset, hitShaderBindingStride, callableShaderBindingTableBuffer, callableShaderBindingOffset, callableShaderBindingStride, width, height, depth );
@@ -1257,7 +1926,14 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdWaitEvents( commandBuffer, eventCount, pEvents, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers );
}
- void vkCmdWriteAccelerationStructuresPropertiesNV( VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureNV* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ void vkCmdWriteAccelerationStructuresPropertiesKHR( VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdWriteAccelerationStructuresPropertiesKHR( commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery );
+ }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+ void vkCmdWriteAccelerationStructuresPropertiesNV( VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdWriteAccelerationStructuresPropertiesNV( commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery );
}
@@ -1272,182 +1948,202 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdWriteTimestamp( commandBuffer, pipelineStage, queryPool, query );
}
- VkResult vkEndCommandBuffer( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkCompileDeferredNV( VkDevice device, VkPipeline pipeline, uint32_t shader ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkEndCommandBuffer( commandBuffer );
+ return ::vkCompileDeferredNV( device, pipeline, shader );
}
- VkResult vkResetCommandBuffer( VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags ) const VULKAN_HPP_NOEXCEPT
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ VkResult vkCopyAccelerationStructureKHR( VkDevice device, const VkCopyAccelerationStructureInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkResetCommandBuffer( commandBuffer, flags );
+ return ::vkCopyAccelerationStructureKHR( device, pInfo );
}
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- VkResult vkAcquireFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ VkResult vkCopyAccelerationStructureToMemoryKHR( VkDevice device, const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkAcquireFullScreenExclusiveModeEXT( device, swapchain );
+ return ::vkCopyAccelerationStructureToMemoryKHR( device, pInfo );
}
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- VkResult vkAcquireNextImage2KHR( VkDevice device, const VkAcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex ) const VULKAN_HPP_NOEXCEPT
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ VkResult vkCopyMemoryToAccelerationStructureKHR( VkDevice device, const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkAcquireNextImage2KHR( device, pAcquireInfo, pImageIndex );
+ return ::vkCopyMemoryToAccelerationStructureKHR( device, pInfo );
}
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- VkResult vkAcquireNextImageKHR( VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t* pImageIndex ) const VULKAN_HPP_NOEXCEPT
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ VkResult vkCreateAccelerationStructureKHR( VkDevice device, const VkAccelerationStructureCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureKHR* pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkAcquireNextImageKHR( device, swapchain, timeout, semaphore, fence, pImageIndex );
+ return ::vkCreateAccelerationStructureKHR( device, pCreateInfo, pAllocator, pAccelerationStructure );
}
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- VkResult vkAcquirePerformanceConfigurationINTEL( VkDevice device, const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VkPerformanceConfigurationINTEL* pConfiguration ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkCreateAccelerationStructureNV( VkDevice device, const VkAccelerationStructureCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureNV* pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkAcquirePerformanceConfigurationINTEL( device, pAcquireInfo, pConfiguration );
+ return ::vkCreateAccelerationStructureNV( device, pCreateInfo, pAllocator, pAccelerationStructure );
}
- VkResult vkAcquireProfilingLockKHR( VkDevice device, const VkAcquireProfilingLockInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+ VkResult vkCreateAndroidSurfaceKHR( VkInstance instance, const VkAndroidSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkAcquireProfilingLockKHR( device, pInfo );
+ return ::vkCreateAndroidSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
}
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
- VkResult vkAllocateCommandBuffers( VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkCreateBuffer( VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkAllocateCommandBuffers( device, pAllocateInfo, pCommandBuffers );
+ return ::vkCreateBuffer( device, pCreateInfo, pAllocator, pBuffer );
}
- VkResult vkAllocateDescriptorSets( VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkCreateBufferView( VkDevice device, const VkBufferViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferView* pView ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkAllocateDescriptorSets( device, pAllocateInfo, pDescriptorSets );
+ return ::vkCreateBufferView( device, pCreateInfo, pAllocator, pView );
}
- VkResult vkAllocateMemory( VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkCreateCommandPool( VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCommandPool* pCommandPool ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkAllocateMemory( device, pAllocateInfo, pAllocator, pMemory );
+ return ::vkCreateCommandPool( device, pCreateInfo, pAllocator, pCommandPool );
}
- VkResult vkBindAccelerationStructureMemoryNV( VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV* pBindInfos ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkCreateComputePipelines( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkBindAccelerationStructureMemoryNV( device, bindInfoCount, pBindInfos );
+ return ::vkCreateComputePipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines );
}
- VkResult vkBindBufferMemory( VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkCreateDebugReportCallbackEXT( VkInstance instance, const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugReportCallbackEXT* pCallback ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkBindBufferMemory( device, buffer, memory, memoryOffset );
+ return ::vkCreateDebugReportCallbackEXT( instance, pCreateInfo, pAllocator, pCallback );
}
- VkResult vkBindBufferMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkCreateDebugUtilsMessengerEXT( VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugUtilsMessengerEXT* pMessenger ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkBindBufferMemory2( device, bindInfoCount, pBindInfos );
+ return ::vkCreateDebugUtilsMessengerEXT( instance, pCreateInfo, pAllocator, pMessenger );
}
- VkResult vkBindBufferMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos ) const VULKAN_HPP_NOEXCEPT
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ VkResult vkCreateDeferredOperationKHR( VkDevice device, const VkAllocationCallbacks* pAllocator, VkDeferredOperationKHR* pDeferredOperation ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkBindBufferMemory2KHR( device, bindInfoCount, pBindInfos );
+ return ::vkCreateDeferredOperationKHR( device, pAllocator, pDeferredOperation );
}
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- VkResult vkBindImageMemory( VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkCreateDescriptorPool( VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkBindImageMemory( device, image, memory, memoryOffset );
+ return ::vkCreateDescriptorPool( device, pCreateInfo, pAllocator, pDescriptorPool );
}
- VkResult vkBindImageMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkCreateDescriptorSetLayout( VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorSetLayout* pSetLayout ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkBindImageMemory2( device, bindInfoCount, pBindInfos );
+ return ::vkCreateDescriptorSetLayout( device, pCreateInfo, pAllocator, pSetLayout );
}
- VkResult vkBindImageMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkCreateDescriptorUpdateTemplate( VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkBindImageMemory2KHR( device, bindInfoCount, pBindInfos );
+ return ::vkCreateDescriptorUpdateTemplate( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate );
}
- VkResult vkCompileDeferredNV( VkDevice device, VkPipeline pipeline, uint32_t shader ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkCreateDescriptorUpdateTemplateKHR( VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCompileDeferredNV( device, pipeline, shader );
+ return ::vkCreateDescriptorUpdateTemplateKHR( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate );
}
- VkResult vkCreateAccelerationStructureNV( VkDevice device, const VkAccelerationStructureCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureNV* pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkCreateDevice( VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCreateAccelerationStructureNV( device, pCreateInfo, pAllocator, pAccelerationStructure );
+ return ::vkCreateDevice( physicalDevice, pCreateInfo, pAllocator, pDevice );
}
- VkResult vkCreateBuffer( VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer ) const VULKAN_HPP_NOEXCEPT
+#ifdef VK_USE_PLATFORM_DIRECTFB_EXT
+ VkResult vkCreateDirectFBSurfaceEXT( VkInstance instance, const VkDirectFBSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCreateBuffer( device, pCreateInfo, pAllocator, pBuffer );
+ return ::vkCreateDirectFBSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface );
}
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
- VkResult vkCreateBufferView( VkDevice device, const VkBufferViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferView* pView ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkCreateDisplayModeKHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDisplayModeKHR* pMode ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCreateBufferView( device, pCreateInfo, pAllocator, pView );
+ return ::vkCreateDisplayModeKHR( physicalDevice, display, pCreateInfo, pAllocator, pMode );
}
- VkResult vkCreateCommandPool( VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCommandPool* pCommandPool ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkCreateDisplayPlaneSurfaceKHR( VkInstance instance, const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCreateCommandPool( device, pCreateInfo, pAllocator, pCommandPool );
+ return ::vkCreateDisplayPlaneSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
}
- VkResult vkCreateComputePipelines( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkCreateEvent( VkDevice device, const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkEvent* pEvent ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCreateComputePipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines );
+ return ::vkCreateEvent( device, pCreateInfo, pAllocator, pEvent );
}
- VkResult vkCreateDescriptorPool( VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkCreateFence( VkDevice device, const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCreateDescriptorPool( device, pCreateInfo, pAllocator, pDescriptorPool );
+ return ::vkCreateFence( device, pCreateInfo, pAllocator, pFence );
}
- VkResult vkCreateDescriptorSetLayout( VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorSetLayout* pSetLayout ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkCreateFramebuffer( VkDevice device, const VkFramebufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFramebuffer* pFramebuffer ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCreateDescriptorSetLayout( device, pCreateInfo, pAllocator, pSetLayout );
+ return ::vkCreateFramebuffer( device, pCreateInfo, pAllocator, pFramebuffer );
}
- VkResult vkCreateDescriptorUpdateTemplate( VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkCreateGraphicsPipelines( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCreateDescriptorUpdateTemplate( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate );
+ return ::vkCreateGraphicsPipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines );
}
- VkResult vkCreateDescriptorUpdateTemplateKHR( VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkCreateHeadlessSurfaceEXT( VkInstance instance, const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCreateDescriptorUpdateTemplateKHR( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate );
+ return ::vkCreateHeadlessSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface );
}
- VkResult vkCreateEvent( VkDevice device, const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkEvent* pEvent ) const VULKAN_HPP_NOEXCEPT
+#ifdef VK_USE_PLATFORM_IOS_MVK
+ VkResult vkCreateIOSSurfaceMVK( VkInstance instance, const VkIOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCreateEvent( device, pCreateInfo, pAllocator, pEvent );
+ return ::vkCreateIOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface );
}
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
- VkResult vkCreateFence( VkDevice device, const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkCreateImage( VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImage* pImage ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCreateFence( device, pCreateInfo, pAllocator, pFence );
+ return ::vkCreateImage( device, pCreateInfo, pAllocator, pImage );
}
- VkResult vkCreateFramebuffer( VkDevice device, const VkFramebufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFramebuffer* pFramebuffer ) const VULKAN_HPP_NOEXCEPT
+#ifdef VK_USE_PLATFORM_FUCHSIA
+ VkResult vkCreateImagePipeSurfaceFUCHSIA( VkInstance instance, const VkImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCreateFramebuffer( device, pCreateInfo, pAllocator, pFramebuffer );
+ return ::vkCreateImagePipeSurfaceFUCHSIA( instance, pCreateInfo, pAllocator, pSurface );
}
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
- VkResult vkCreateGraphicsPipelines( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkCreateImageView( VkDevice device, const VkImageViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImageView* pView ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCreateGraphicsPipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines );
+ return ::vkCreateImageView( device, pCreateInfo, pAllocator, pView );
}
- VkResult vkCreateImage( VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImage* pImage ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkCreateIndirectCommandsLayoutNV( VkDevice device, const VkIndirectCommandsLayoutCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNV* pIndirectCommandsLayout ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCreateImage( device, pCreateInfo, pAllocator, pImage );
+ return ::vkCreateIndirectCommandsLayoutNV( device, pCreateInfo, pAllocator, pIndirectCommandsLayout );
}
- VkResult vkCreateImageView( VkDevice device, const VkImageViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImageView* pView ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkCreateInstance( const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkInstance* pInstance ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCreateImageView( device, pCreateInfo, pAllocator, pView );
+ return ::vkCreateInstance( pCreateInfo, pAllocator, pInstance );
}
- VkResult vkCreateIndirectCommandsLayoutNVX( VkDevice device, const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout ) const VULKAN_HPP_NOEXCEPT
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+ VkResult vkCreateMacOSSurfaceMVK( VkInstance instance, const VkMacOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCreateIndirectCommandsLayoutNVX( device, pCreateInfo, pAllocator, pIndirectCommandsLayout );
+ return ::vkCreateMacOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface );
}
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
- VkResult vkCreateObjectTableNVX( VkDevice device, const VkObjectTableCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkObjectTableNVX* pObjectTable ) const VULKAN_HPP_NOEXCEPT
+#ifdef VK_USE_PLATFORM_METAL_EXT
+ VkResult vkCreateMetalSurfaceEXT( VkInstance instance, const VkMetalSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCreateObjectTableNVX( device, pCreateInfo, pAllocator, pObjectTable );
+ return ::vkCreateMetalSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface );
}
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
VkResult vkCreatePipelineCache( VkDevice device, const VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineCache* pPipelineCache ) const VULKAN_HPP_NOEXCEPT
{
@@ -1459,11 +2155,23 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCreatePipelineLayout( device, pCreateInfo, pAllocator, pPipelineLayout );
}
+ VkResult vkCreatePrivateDataSlotEXT( VkDevice device, const VkPrivateDataSlotCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPrivateDataSlotEXT* pPrivateDataSlot ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCreatePrivateDataSlotEXT( device, pCreateInfo, pAllocator, pPrivateDataSlot );
+ }
+
VkResult vkCreateQueryPool( VkDevice device, const VkQueryPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkQueryPool* pQueryPool ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCreateQueryPool( device, pCreateInfo, pAllocator, pQueryPool );
}
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ VkResult vkCreateRayTracingPipelinesKHR( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCreateRayTracingPipelinesKHR( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines );
+ }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
VkResult vkCreateRayTracingPipelinesNV( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCreateRayTracingPipelinesNV( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines );
@@ -1514,6 +2222,13 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCreateSharedSwapchainsKHR( device, swapchainCount, pCreateInfos, pAllocator, pSwapchains );
}
+#ifdef VK_USE_PLATFORM_GGP
+ VkResult vkCreateStreamDescriptorSurfaceGGP( VkInstance instance, const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCreateStreamDescriptorSurfaceGGP( instance, pCreateInfo, pAllocator, pSurface );
+ }
+#endif /*VK_USE_PLATFORM_GGP*/
+
VkResult vkCreateSwapchainKHR( VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCreateSwapchainKHR( device, pCreateInfo, pAllocator, pSwapchain );
@@ -1524,6 +2239,41 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCreateValidationCacheEXT( device, pCreateInfo, pAllocator, pValidationCache );
}
+#ifdef VK_USE_PLATFORM_VI_NN
+ VkResult vkCreateViSurfaceNN( VkInstance instance, const VkViSurfaceCreateInfoNN* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCreateViSurfaceNN( instance, pCreateInfo, pAllocator, pSurface );
+ }
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+ VkResult vkCreateWaylandSurfaceKHR( VkInstance instance, const VkWaylandSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCreateWaylandSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
+ }
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ VkResult vkCreateWin32SurfaceKHR( VkInstance instance, const VkWin32SurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCreateWin32SurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
+ }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+ VkResult vkCreateXcbSurfaceKHR( VkInstance instance, const VkXcbSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCreateXcbSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
+ }
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+ VkResult vkCreateXlibSurfaceKHR( VkInstance instance, const VkXlibSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCreateXlibSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
+ }
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
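Each of the surface-creation wrappers above is compiled in only when its VK_USE_PLATFORM_* macro is defined before the Vulkan headers are included. A sketch for the XCB case, assuming an existing instance, connection and window; the helper name is illustrative only:

#define VK_USE_PLATFORM_XCB_KHR   // must come before any Vulkan include
#include <xcb/xcb.h>
#include <vulkan/vulkan.h>

// Sketch only: creates a presentable surface for an existing XCB window.
VkResult createXcbSurface( VkInstance instance, xcb_connection_t* connection,
                           xcb_window_t window, VkSurfaceKHR* outSurface )
{
  VkXcbSurfaceCreateInfoKHR createInfo = {};
  createInfo.sType      = VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR;
  createInfo.connection = connection;
  createInfo.window     = window;
  return ::vkCreateXcbSurfaceKHR( instance, &createInfo, nullptr, outSurface );
}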
VkResult vkDebugMarkerSetObjectNameEXT( VkDevice device, const VkDebugMarkerObjectNameInfoEXT* pNameInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkDebugMarkerSetObjectNameEXT( device, pNameInfo );
@@ -1534,7 +2284,26 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkDebugMarkerSetObjectTagEXT( device, pTagInfo );
}
- void vkDestroyAccelerationStructureNV( VkDevice device, VkAccelerationStructureNV accelerationStructure, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+ void vkDebugReportMessageEXT( VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkDebugReportMessageEXT( instance, flags, objectType, object, location, messageCode, pLayerPrefix, pMessage );
+ }
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ VkResult vkDeferredOperationJoinKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkDeferredOperationJoinKHR( device, operation );
+ }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ void vkDestroyAccelerationStructureKHR( VkDevice device, VkAccelerationStructureKHR accelerationStructure, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkDestroyAccelerationStructureKHR( device, accelerationStructure, pAllocator );
+ }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+ void vkDestroyAccelerationStructureNV( VkDevice device, VkAccelerationStructureKHR accelerationStructure, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroyAccelerationStructureNV( device, accelerationStructure, pAllocator );
}
@@ -1554,6 +2323,23 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkDestroyCommandPool( device, commandPool, pAllocator );
}
+ void vkDestroyDebugReportCallbackEXT( VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkDestroyDebugReportCallbackEXT( instance, callback, pAllocator );
+ }
+
+ void vkDestroyDebugUtilsMessengerEXT( VkInstance instance, VkDebugUtilsMessengerEXT messenger, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkDestroyDebugUtilsMessengerEXT( instance, messenger, pAllocator );
+ }
+
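The debug-utils wrappers added here pair with vkGetInstanceProcAddr (also wrapped further down): the EXT entry points are usually fetched at run time rather than linked directly. A sketch that installs a simple printing callback, assuming the instance was created with VK_EXT_debug_utils enabled:

#include <cstdio>
#include <vulkan/vulkan.h>

// Sketch only: prints every validation/warning message and lets the call continue.
static VKAPI_ATTR VkBool32 VKAPI_CALL debugCallback(
    VkDebugUtilsMessageSeverityFlagBitsEXT, VkDebugUtilsMessageTypeFlagsEXT,
    const VkDebugUtilsMessengerCallbackDataEXT* data, void* )
{
  std::fprintf( stderr, "[vulkan] %s\n", data->pMessage );
  return VK_FALSE;
}

VkResult createMessenger( VkInstance instance, VkDebugUtilsMessengerEXT* outMessenger )
{
  auto create = reinterpret_cast<PFN_vkCreateDebugUtilsMessengerEXT>(
      ::vkGetInstanceProcAddr( instance, "vkCreateDebugUtilsMessengerEXT" ) );
  if ( !create )
    return VK_ERROR_EXTENSION_NOT_PRESENT;

  VkDebugUtilsMessengerCreateInfoEXT info = {};
  info.sType           = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT;
  info.messageSeverity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT |
                         VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT;
  info.messageType     = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT |
                         VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT;
  info.pfnUserCallback = debugCallback;
  return create( instance, &info, nullptr, outMessenger );
}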
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ void vkDestroyDeferredOperationKHR( VkDevice device, VkDeferredOperationKHR operation, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkDestroyDeferredOperationKHR( device, operation, pAllocator );
+ }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
void vkDestroyDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroyDescriptorPool( device, descriptorPool, pAllocator );
@@ -1604,14 +2390,14 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkDestroyImageView( device, imageView, pAllocator );
}
- void vkDestroyIndirectCommandsLayoutNVX( VkDevice device, VkIndirectCommandsLayoutNVX indirectCommandsLayout, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+ void vkDestroyIndirectCommandsLayoutNV( VkDevice device, VkIndirectCommandsLayoutNV indirectCommandsLayout, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkDestroyIndirectCommandsLayoutNVX( device, indirectCommandsLayout, pAllocator );
+ return ::vkDestroyIndirectCommandsLayoutNV( device, indirectCommandsLayout, pAllocator );
}
- void vkDestroyObjectTableNVX( VkDevice device, VkObjectTableNVX objectTable, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+ void vkDestroyInstance( VkInstance instance, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkDestroyObjectTableNVX( device, objectTable, pAllocator );
+ return ::vkDestroyInstance( instance, pAllocator );
}
void vkDestroyPipeline( VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
@@ -1629,6 +2415,11 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkDestroyPipelineLayout( device, pipelineLayout, pAllocator );
}
+ void vkDestroyPrivateDataSlotEXT( VkDevice device, VkPrivateDataSlotEXT privateDataSlot, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkDestroyPrivateDataSlotEXT( device, privateDataSlot, pAllocator );
+ }
+
void vkDestroyQueryPool( VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroyQueryPool( device, queryPool, pAllocator );
@@ -1664,6 +2455,11 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkDestroyShaderModule( device, shaderModule, pAllocator );
}
+ void vkDestroySurfaceKHR( VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkDestroySurfaceKHR( instance, surface, pAllocator );
+ }
+
void vkDestroySwapchainKHR( VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroySwapchainKHR( device, swapchain, pAllocator );
@@ -1684,6 +2480,56 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkDisplayPowerControlEXT( device, display, pDisplayPowerInfo );
}
+ VkResult vkEndCommandBuffer( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkEndCommandBuffer( commandBuffer );
+ }
+
+ VkResult vkEnumerateDeviceExtensionProperties( VkPhysicalDevice physicalDevice, const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkEnumerateDeviceExtensionProperties( physicalDevice, pLayerName, pPropertyCount, pProperties );
+ }
+
+ VkResult vkEnumerateDeviceLayerProperties( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkLayerProperties* pProperties ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkEnumerateDeviceLayerProperties( physicalDevice, pPropertyCount, pProperties );
+ }
+
+ VkResult vkEnumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, pProperties );
+ }
+
+ VkResult vkEnumerateInstanceLayerProperties( uint32_t* pPropertyCount, VkLayerProperties* pProperties ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkEnumerateInstanceLayerProperties( pPropertyCount, pProperties );
+ }
+
+ VkResult vkEnumerateInstanceVersion( uint32_t* pApiVersion ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkEnumerateInstanceVersion( pApiVersion );
+ }
+
+ VkResult vkEnumeratePhysicalDeviceGroups( VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkEnumeratePhysicalDeviceGroups( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties );
+ }
+
+ VkResult vkEnumeratePhysicalDeviceGroupsKHR( VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkEnumeratePhysicalDeviceGroupsKHR( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties );
+ }
+
+ VkResult vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t* pCounterCount, VkPerformanceCounterKHR* pCounters, VkPerformanceCounterDescriptionKHR* pCounterDescriptions ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( physicalDevice, queueFamilyIndex, pCounterCount, pCounters, pCounterDescriptions );
+ }
+
+ VkResult vkEnumeratePhysicalDevices( VkInstance instance, uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkEnumeratePhysicalDevices( instance, pPhysicalDeviceCount, pPhysicalDevices );
+ }
+
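The enumeration wrappers collected here all follow Vulkan's usual two-call pattern: query the count first, then fill a caller-sized array. A short sketch using vkEnumeratePhysicalDevices:

#include <vector>
#include <vulkan/vulkan.h>

// Sketch only: returns every physical device visible to the instance.
std::vector<VkPhysicalDevice> enumerateGpus( VkInstance instance )
{
  uint32_t count = 0;
  ::vkEnumeratePhysicalDevices( instance, &count, nullptr );      // first call: count only

  std::vector<VkPhysicalDevice> gpus( count );
  ::vkEnumeratePhysicalDevices( instance, &count, gpus.data() );  // second call: fill the array
  return gpus;
}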
VkResult vkFlushMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges ) const VULKAN_HPP_NOEXCEPT
{
return ::vkFlushMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges );
@@ -1704,11 +2550,25 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkFreeMemory( device, memory, pAllocator );
}
- VkResult vkGetAccelerationStructureHandleNV( VkDevice device, VkAccelerationStructureNV accelerationStructure, size_t dataSize, void* pData ) const VULKAN_HPP_NOEXCEPT
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ VkDeviceAddress vkGetAccelerationStructureDeviceAddressKHR( VkDevice device, const VkAccelerationStructureDeviceAddressInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkGetAccelerationStructureDeviceAddressKHR( device, pInfo );
+ }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+ VkResult vkGetAccelerationStructureHandleNV( VkDevice device, VkAccelerationStructureKHR accelerationStructure, size_t dataSize, void* pData ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetAccelerationStructureHandleNV( device, accelerationStructure, dataSize, pData );
}
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ void vkGetAccelerationStructureMemoryRequirementsKHR( VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoKHR* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkGetAccelerationStructureMemoryRequirementsKHR( device, pInfo, pMemoryRequirements );
+ }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
void vkGetAccelerationStructureMemoryRequirementsNV( VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetAccelerationStructureMemoryRequirementsNV( device, pInfo, pMemoryRequirements );
@@ -1766,6 +2626,20 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkGetCalibratedTimestampsEXT( device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation );
}
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ uint32_t vkGetDeferredOperationMaxConcurrencyKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkGetDeferredOperationMaxConcurrencyKHR( device, operation );
+ }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ VkResult vkGetDeferredOperationResultKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkGetDeferredOperationResultKHR( device, operation );
+ }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
void vkGetDescriptorSetLayoutSupport( VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetDescriptorSetLayoutSupport( device, pCreateInfo, pSupport );
@@ -1776,6 +2650,13 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkGetDescriptorSetLayoutSupportKHR( device, pCreateInfo, pSupport );
}
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ VkResult vkGetDeviceAccelerationStructureCompatibilityKHR( VkDevice device, const VkAccelerationStructureVersionKHR* version ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkGetDeviceAccelerationStructureCompatibilityKHR( device, version );
+ }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
void vkGetDeviceGroupPeerMemoryFeatures( VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetDeviceGroupPeerMemoryFeatures( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures );
@@ -1833,6 +2714,31 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkGetDeviceQueue2( device, pQueueInfo, pQueue );
}
+ VkResult vkGetDisplayModeProperties2KHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModeProperties2KHR* pProperties ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkGetDisplayModeProperties2KHR( physicalDevice, display, pPropertyCount, pProperties );
+ }
+
+ VkResult vkGetDisplayModePropertiesKHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModePropertiesKHR* pProperties ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkGetDisplayModePropertiesKHR( physicalDevice, display, pPropertyCount, pProperties );
+ }
+
+ VkResult vkGetDisplayPlaneCapabilities2KHR( VkPhysicalDevice physicalDevice, const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo, VkDisplayPlaneCapabilities2KHR* pCapabilities ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkGetDisplayPlaneCapabilities2KHR( physicalDevice, pDisplayPlaneInfo, pCapabilities );
+ }
+
+ VkResult vkGetDisplayPlaneCapabilitiesKHR( VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR* pCapabilities ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkGetDisplayPlaneCapabilitiesKHR( physicalDevice, mode, planeIndex, pCapabilities );
+ }
+
+ VkResult vkGetDisplayPlaneSupportedDisplaysKHR( VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t* pDisplayCount, VkDisplayKHR* pDisplays ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkGetDisplayPlaneSupportedDisplaysKHR( physicalDevice, planeIndex, pDisplayCount, pDisplays );
+ }
+
VkResult vkGetEventStatus( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetEventStatus( device, event );
@@ -1855,6 +2761,11 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ void vkGetGeneratedCommandsMemoryRequirementsNV( VkDevice device, const VkGeneratedCommandsMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkGetGeneratedCommandsMemoryRequirementsNV( device, pInfo, pMemoryRequirements );
+ }
+
VkResult vkGetImageDrmFormatModifierPropertiesEXT( VkDevice device, VkImage image, VkImageDrmFormatModifierPropertiesEXT* pProperties ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetImageDrmFormatModifierPropertiesEXT( device, image, pProperties );
@@ -1895,11 +2806,21 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkGetImageSubresourceLayout( device, image, pSubresource, pLayout );
}
+ VkResult vkGetImageViewAddressNVX( VkDevice device, VkImageView imageView, VkImageViewAddressPropertiesNVX* pProperties ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkGetImageViewAddressNVX( device, imageView, pProperties );
+ }
+
uint32_t vkGetImageViewHandleNVX( VkDevice device, const VkImageViewHandleInfoNVX* pInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetImageViewHandleNVX( device, pInfo );
}
+ PFN_vkVoidFunction vkGetInstanceProcAddr( VkInstance instance, const char* pName ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkGetInstanceProcAddr( instance, pName );
+ }
+
#ifdef VK_USE_PLATFORM_ANDROID_KHR
VkResult vkGetMemoryAndroidHardwareBufferANDROID( VkDevice device, const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer ) const VULKAN_HPP_NOEXCEPT
{
@@ -1953,824 +2874,653 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkGetPerformanceParameterINTEL( device, parameter, pValue );
}
- VkResult vkGetPipelineCacheData( VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData ) const VULKAN_HPP_NOEXCEPT
- {
- return ::vkGetPipelineCacheData( device, pipelineCache, pDataSize, pData );
- }
-
- VkResult vkGetPipelineExecutableInternalRepresentationsKHR( VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations ) const VULKAN_HPP_NOEXCEPT
- {
- return ::vkGetPipelineExecutableInternalRepresentationsKHR( device, pExecutableInfo, pInternalRepresentationCount, pInternalRepresentations );
- }
-
- VkResult vkGetPipelineExecutablePropertiesKHR( VkDevice device, const VkPipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, VkPipelineExecutablePropertiesKHR* pProperties ) const VULKAN_HPP_NOEXCEPT
- {
- return ::vkGetPipelineExecutablePropertiesKHR( device, pPipelineInfo, pExecutableCount, pProperties );
- }
-
- VkResult vkGetPipelineExecutableStatisticsKHR( VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, VkPipelineExecutableStatisticKHR* pStatistics ) const VULKAN_HPP_NOEXCEPT
- {
- return ::vkGetPipelineExecutableStatisticsKHR( device, pExecutableInfo, pStatisticCount, pStatistics );
- }
-
- VkResult vkGetQueryPoolResults( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT
- {
- return ::vkGetQueryPoolResults( device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags );
- }
-
- VkResult vkGetRayTracingShaderGroupHandlesNV( VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData ) const VULKAN_HPP_NOEXCEPT
- {
- return ::vkGetRayTracingShaderGroupHandlesNV( device, pipeline, firstGroup, groupCount, dataSize, pData );
- }
-
- VkResult vkGetRefreshCycleDurationGOOGLE( VkDevice device, VkSwapchainKHR swapchain, VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties ) const VULKAN_HPP_NOEXCEPT
- {
- return ::vkGetRefreshCycleDurationGOOGLE( device, swapchain, pDisplayTimingProperties );
- }
-
- void vkGetRenderAreaGranularity( VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity ) const VULKAN_HPP_NOEXCEPT
- {
- return ::vkGetRenderAreaGranularity( device, renderPass, pGranularity );
- }
-
- VkResult vkGetSemaphoreCounterValue( VkDevice device, VkSemaphore semaphore, uint64_t* pValue ) const VULKAN_HPP_NOEXCEPT
- {
- return ::vkGetSemaphoreCounterValue( device, semaphore, pValue );
- }
-
- VkResult vkGetSemaphoreCounterValueKHR( VkDevice device, VkSemaphore semaphore, uint64_t* pValue ) const VULKAN_HPP_NOEXCEPT
- {
- return ::vkGetSemaphoreCounterValueKHR( device, semaphore, pValue );
- }
-
- VkResult vkGetSemaphoreFdKHR( VkDevice device, const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd ) const VULKAN_HPP_NOEXCEPT
- {
- return ::vkGetSemaphoreFdKHR( device, pGetFdInfo, pFd );
- }
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- VkResult vkGetSemaphoreWin32HandleKHR( VkDevice device, const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle ) const VULKAN_HPP_NOEXCEPT
- {
- return ::vkGetSemaphoreWin32HandleKHR( device, pGetWin32HandleInfo, pHandle );
- }
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
- VkResult vkGetShaderInfoAMD( VkDevice device, VkPipeline pipeline, VkShaderStageFlagBits shaderStage, VkShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo ) const VULKAN_HPP_NOEXCEPT
- {
- return ::vkGetShaderInfoAMD( device, pipeline, shaderStage, infoType, pInfoSize, pInfo );
- }
-
- VkResult vkGetSwapchainCounterEXT( VkDevice device, VkSwapchainKHR swapchain, VkSurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue ) const VULKAN_HPP_NOEXCEPT
- {
- return ::vkGetSwapchainCounterEXT( device, swapchain, counter, pCounterValue );
- }
-
- VkResult vkGetSwapchainImagesKHR( VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VkImage* pSwapchainImages ) const VULKAN_HPP_NOEXCEPT
- {
- return ::vkGetSwapchainImagesKHR( device, swapchain, pSwapchainImageCount, pSwapchainImages );
- }
-
- VkResult vkGetSwapchainStatusKHR( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT
- {
- return ::vkGetSwapchainStatusKHR( device, swapchain );
- }
-
- VkResult vkGetValidationCacheDataEXT( VkDevice device, VkValidationCacheEXT validationCache, size_t* pDataSize, void* pData ) const VULKAN_HPP_NOEXCEPT
- {
- return ::vkGetValidationCacheDataEXT( device, validationCache, pDataSize, pData );
- }
-
- VkResult vkImportFenceFdKHR( VkDevice device, const VkImportFenceFdInfoKHR* pImportFenceFdInfo ) const VULKAN_HPP_NOEXCEPT
- {
- return ::vkImportFenceFdKHR( device, pImportFenceFdInfo );
- }
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- VkResult vkImportFenceWin32HandleKHR( VkDevice device, const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( VkPhysicalDevice physicalDevice, uint32_t* pTimeDomainCount, VkTimeDomainEXT* pTimeDomains ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkImportFenceWin32HandleKHR( device, pImportFenceWin32HandleInfo );
+ return ::vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( physicalDevice, pTimeDomainCount, pTimeDomains );
}
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- VkResult vkImportSemaphoreFdKHR( VkDevice device, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkCooperativeMatrixPropertiesNV* pProperties ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkImportSemaphoreFdKHR( device, pImportSemaphoreFdInfo );
+ return ::vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( physicalDevice, pPropertyCount, pProperties );
}
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- VkResult vkImportSemaphoreWin32HandleKHR( VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT
+#ifdef VK_USE_PLATFORM_DIRECTFB_EXT
+ VkBool32 vkGetPhysicalDeviceDirectFBPresentationSupportEXT( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, IDirectFB* dfb ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkImportSemaphoreWin32HandleKHR( device, pImportSemaphoreWin32HandleInfo );
+ return ::vkGetPhysicalDeviceDirectFBPresentationSupportEXT( physicalDevice, queueFamilyIndex, dfb );
}
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
- VkResult vkInitializePerformanceApiINTEL( VkDevice device, const VkInitializePerformanceApiInfoINTEL* pInitializeInfo ) const VULKAN_HPP_NOEXCEPT
- {
- return ::vkInitializePerformanceApiINTEL( device, pInitializeInfo );
- }
-
- VkResult vkInvalidateMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetPhysicalDeviceDisplayPlaneProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlaneProperties2KHR* pProperties ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkInvalidateMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges );
+ return ::vkGetPhysicalDeviceDisplayPlaneProperties2KHR( physicalDevice, pPropertyCount, pProperties );
}
- VkResult vkMapMemory( VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetPhysicalDeviceDisplayPlanePropertiesKHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlanePropertiesKHR* pProperties ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkMapMemory( device, memory, offset, size, flags, ppData );
+ return ::vkGetPhysicalDeviceDisplayPlanePropertiesKHR( physicalDevice, pPropertyCount, pProperties );
}
- VkResult vkMergePipelineCaches( VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetPhysicalDeviceDisplayProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayProperties2KHR* pProperties ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkMergePipelineCaches( device, dstCache, srcCacheCount, pSrcCaches );
+ return ::vkGetPhysicalDeviceDisplayProperties2KHR( physicalDevice, pPropertyCount, pProperties );
}
- VkResult vkMergeValidationCachesEXT( VkDevice device, VkValidationCacheEXT dstCache, uint32_t srcCacheCount, const VkValidationCacheEXT* pSrcCaches ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetPhysicalDeviceDisplayPropertiesKHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPropertiesKHR* pProperties ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkMergeValidationCachesEXT( device, dstCache, srcCacheCount, pSrcCaches );
+ return ::vkGetPhysicalDeviceDisplayPropertiesKHR( physicalDevice, pPropertyCount, pProperties );
}
- VkResult vkRegisterDeviceEventEXT( VkDevice device, const VkDeviceEventInfoEXT* pDeviceEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence ) const VULKAN_HPP_NOEXCEPT
+ void vkGetPhysicalDeviceExternalBufferProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkRegisterDeviceEventEXT( device, pDeviceEventInfo, pAllocator, pFence );
+ return ::vkGetPhysicalDeviceExternalBufferProperties( physicalDevice, pExternalBufferInfo, pExternalBufferProperties );
}
- VkResult vkRegisterDisplayEventEXT( VkDevice device, VkDisplayKHR display, const VkDisplayEventInfoEXT* pDisplayEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence ) const VULKAN_HPP_NOEXCEPT
+ void vkGetPhysicalDeviceExternalBufferPropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkRegisterDisplayEventEXT( device, display, pDisplayEventInfo, pAllocator, pFence );
+ return ::vkGetPhysicalDeviceExternalBufferPropertiesKHR( physicalDevice, pExternalBufferInfo, pExternalBufferProperties );
}
- VkResult vkRegisterObjectsNVX( VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices ) const VULKAN_HPP_NOEXCEPT
+ void vkGetPhysicalDeviceExternalFenceProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkRegisterObjectsNVX( device, objectTable, objectCount, ppObjectTableEntries, pObjectIndices );
+ return ::vkGetPhysicalDeviceExternalFenceProperties( physicalDevice, pExternalFenceInfo, pExternalFenceProperties );
}
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- VkResult vkReleaseFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT
+ void vkGetPhysicalDeviceExternalFencePropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkReleaseFullScreenExclusiveModeEXT( device, swapchain );
+ return ::vkGetPhysicalDeviceExternalFencePropertiesKHR( physicalDevice, pExternalFenceInfo, pExternalFenceProperties );
}
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- VkResult vkReleasePerformanceConfigurationINTEL( VkDevice device, VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetPhysicalDeviceExternalImageFormatPropertiesNV( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkExternalMemoryHandleTypeFlagsNV externalHandleType, VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkReleasePerformanceConfigurationINTEL( device, configuration );
+ return ::vkGetPhysicalDeviceExternalImageFormatPropertiesNV( physicalDevice, format, type, tiling, usage, flags, externalHandleType, pExternalImageFormatProperties );
}
- void vkReleaseProfilingLockKHR( VkDevice device ) const VULKAN_HPP_NOEXCEPT
+ void vkGetPhysicalDeviceExternalSemaphoreProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkReleaseProfilingLockKHR( device );
+ return ::vkGetPhysicalDeviceExternalSemaphoreProperties( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties );
}
- VkResult vkResetCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT
+ void vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkResetCommandPool( device, commandPool, flags );
+ return ::vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties );
}
- VkResult vkResetDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT
+ void vkGetPhysicalDeviceFeatures( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkResetDescriptorPool( device, descriptorPool, flags );
+ return ::vkGetPhysicalDeviceFeatures( physicalDevice, pFeatures );
}
- VkResult vkResetEvent( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT
+ void vkGetPhysicalDeviceFeatures2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkResetEvent( device, event );
+ return ::vkGetPhysicalDeviceFeatures2( physicalDevice, pFeatures );
}
- VkResult vkResetFences( VkDevice device, uint32_t fenceCount, const VkFence* pFences ) const VULKAN_HPP_NOEXCEPT
+ void vkGetPhysicalDeviceFeatures2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkResetFences( device, fenceCount, pFences );
+ return ::vkGetPhysicalDeviceFeatures2KHR( physicalDevice, pFeatures );
}
- void vkResetQueryPool( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT
+ void vkGetPhysicalDeviceFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkResetQueryPool( device, queryPool, firstQuery, queryCount );
+ return ::vkGetPhysicalDeviceFormatProperties( physicalDevice, format, pFormatProperties );
}
- void vkResetQueryPoolEXT( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT
+ void vkGetPhysicalDeviceFormatProperties2( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkResetQueryPoolEXT( device, queryPool, firstQuery, queryCount );
+ return ::vkGetPhysicalDeviceFormatProperties2( physicalDevice, format, pFormatProperties );
}
- VkResult vkSetDebugUtilsObjectNameEXT( VkDevice device, const VkDebugUtilsObjectNameInfoEXT* pNameInfo ) const VULKAN_HPP_NOEXCEPT
+ void vkGetPhysicalDeviceFormatProperties2KHR( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkSetDebugUtilsObjectNameEXT( device, pNameInfo );
+ return ::vkGetPhysicalDeviceFormatProperties2KHR( physicalDevice, format, pFormatProperties );
}
- VkResult vkSetDebugUtilsObjectTagEXT( VkDevice device, const VkDebugUtilsObjectTagInfoEXT* pTagInfo ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetPhysicalDeviceImageFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkSetDebugUtilsObjectTagEXT( device, pTagInfo );
+ return ::vkGetPhysicalDeviceImageFormatProperties( physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties );
}
- VkResult vkSetEvent( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetPhysicalDeviceImageFormatProperties2( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkSetEvent( device, event );
+ return ::vkGetPhysicalDeviceImageFormatProperties2( physicalDevice, pImageFormatInfo, pImageFormatProperties );
}
- void vkSetHdrMetadataEXT( VkDevice device, uint32_t swapchainCount, const VkSwapchainKHR* pSwapchains, const VkHdrMetadataEXT* pMetadata ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetPhysicalDeviceImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkSetHdrMetadataEXT( device, swapchainCount, pSwapchains, pMetadata );
+ return ::vkGetPhysicalDeviceImageFormatProperties2KHR( physicalDevice, pImageFormatInfo, pImageFormatProperties );
}
- void vkSetLocalDimmingAMD( VkDevice device, VkSwapchainKHR swapChain, VkBool32 localDimmingEnable ) const VULKAN_HPP_NOEXCEPT
+ void vkGetPhysicalDeviceMemoryProperties( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties* pMemoryProperties ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkSetLocalDimmingAMD( device, swapChain, localDimmingEnable );
+ return ::vkGetPhysicalDeviceMemoryProperties( physicalDevice, pMemoryProperties );
}
- VkResult vkSignalSemaphore( VkDevice device, const VkSemaphoreSignalInfo* pSignalInfo ) const VULKAN_HPP_NOEXCEPT
+ void vkGetPhysicalDeviceMemoryProperties2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkSignalSemaphore( device, pSignalInfo );
+ return ::vkGetPhysicalDeviceMemoryProperties2( physicalDevice, pMemoryProperties );
}
- VkResult vkSignalSemaphoreKHR( VkDevice device, const VkSemaphoreSignalInfo* pSignalInfo ) const VULKAN_HPP_NOEXCEPT
+ void vkGetPhysicalDeviceMemoryProperties2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkSignalSemaphoreKHR( device, pSignalInfo );
+ return ::vkGetPhysicalDeviceMemoryProperties2KHR( physicalDevice, pMemoryProperties );
}
- void vkTrimCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT
+ void vkGetPhysicalDeviceMultisamplePropertiesEXT( VkPhysicalDevice physicalDevice, VkSampleCountFlagBits samples, VkMultisamplePropertiesEXT* pMultisampleProperties ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkTrimCommandPool( device, commandPool, flags );
+ return ::vkGetPhysicalDeviceMultisamplePropertiesEXT( physicalDevice, samples, pMultisampleProperties );
}
- void vkTrimCommandPoolKHR( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetPhysicalDevicePresentRectanglesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pRectCount, VkRect2D* pRects ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkTrimCommandPoolKHR( device, commandPool, flags );
+ return ::vkGetPhysicalDevicePresentRectanglesKHR( physicalDevice, surface, pRectCount, pRects );
}
- void vkUninitializePerformanceApiINTEL( VkDevice device ) const VULKAN_HPP_NOEXCEPT
+ void vkGetPhysicalDeviceProperties( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties* pProperties ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkUninitializePerformanceApiINTEL( device );
+ return ::vkGetPhysicalDeviceProperties( physicalDevice, pProperties );
}
- void vkUnmapMemory( VkDevice device, VkDeviceMemory memory ) const VULKAN_HPP_NOEXCEPT
+ void vkGetPhysicalDeviceProperties2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkUnmapMemory( device, memory );
+ return ::vkGetPhysicalDeviceProperties2( physicalDevice, pProperties );
}
- VkResult vkUnregisterObjectsNVX( VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices ) const VULKAN_HPP_NOEXCEPT
+ void vkGetPhysicalDeviceProperties2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkUnregisterObjectsNVX( device, objectTable, objectCount, pObjectEntryTypes, pObjectIndices );
+ return ::vkGetPhysicalDeviceProperties2KHR( physicalDevice, pProperties );
}
- void vkUpdateDescriptorSetWithTemplate( VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData ) const VULKAN_HPP_NOEXCEPT
+ void vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( VkPhysicalDevice physicalDevice, const VkQueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, uint32_t* pNumPasses ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkUpdateDescriptorSetWithTemplate( device, descriptorSet, descriptorUpdateTemplate, pData );
+ return ::vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( physicalDevice, pPerformanceQueryCreateInfo, pNumPasses );
}
- void vkUpdateDescriptorSetWithTemplateKHR( VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData ) const VULKAN_HPP_NOEXCEPT
+ void vkGetPhysicalDeviceQueueFamilyProperties( VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties* pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkUpdateDescriptorSetWithTemplateKHR( device, descriptorSet, descriptorUpdateTemplate, pData );
+ return ::vkGetPhysicalDeviceQueueFamilyProperties( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties );
}
- void vkUpdateDescriptorSets( VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VkCopyDescriptorSet* pDescriptorCopies ) const VULKAN_HPP_NOEXCEPT
+ void vkGetPhysicalDeviceQueueFamilyProperties2( VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkUpdateDescriptorSets( device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies );
+ return ::vkGetPhysicalDeviceQueueFamilyProperties2( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties );
}
- VkResult vkWaitForFences( VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT
+ void vkGetPhysicalDeviceQueueFamilyProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkWaitForFences( device, fenceCount, pFences, waitAll, timeout );
+ return ::vkGetPhysicalDeviceQueueFamilyProperties2KHR( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties );
}
- VkResult vkWaitSemaphores( VkDevice device, const VkSemaphoreWaitInfo* pWaitInfo, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT
+ void vkGetPhysicalDeviceSparseImageFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pPropertyCount, VkSparseImageFormatProperties* pProperties ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkWaitSemaphores( device, pWaitInfo, timeout );
+ return ::vkGetPhysicalDeviceSparseImageFormatProperties( physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties );
}
- VkResult vkWaitSemaphoresKHR( VkDevice device, const VkSemaphoreWaitInfo* pWaitInfo, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT
+ void vkGetPhysicalDeviceSparseImageFormatProperties2( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkWaitSemaphoresKHR( device, pWaitInfo, timeout );
+ return ::vkGetPhysicalDeviceSparseImageFormatProperties2( physicalDevice, pFormatInfo, pPropertyCount, pProperties );
}
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
- VkResult vkCreateAndroidSurfaceKHR( VkInstance instance, const VkAndroidSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+ void vkGetPhysicalDeviceSparseImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCreateAndroidSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
+ return ::vkGetPhysicalDeviceSparseImageFormatProperties2KHR( physicalDevice, pFormatInfo, pPropertyCount, pProperties );
}
-#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
- VkResult vkCreateDebugReportCallbackEXT( VkInstance instance, const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugReportCallbackEXT* pCallback ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( VkPhysicalDevice physicalDevice, uint32_t* pCombinationCount, VkFramebufferMixedSamplesCombinationNV* pCombinations ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCreateDebugReportCallbackEXT( instance, pCreateInfo, pAllocator, pCallback );
+ return ::vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( physicalDevice, pCombinationCount, pCombinations );
}
- VkResult vkCreateDebugUtilsMessengerEXT( VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugUtilsMessengerEXT* pMessenger ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetPhysicalDeviceSurfaceCapabilities2EXT( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilities2EXT* pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCreateDebugUtilsMessengerEXT( instance, pCreateInfo, pAllocator, pMessenger );
+ return ::vkGetPhysicalDeviceSurfaceCapabilities2EXT( physicalDevice, surface, pSurfaceCapabilities );
}
- VkResult vkCreateDisplayPlaneSurfaceKHR( VkInstance instance, const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetPhysicalDeviceSurfaceCapabilities2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkSurfaceCapabilities2KHR* pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCreateDisplayPlaneSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
+ return ::vkGetPhysicalDeviceSurfaceCapabilities2KHR( physicalDevice, pSurfaceInfo, pSurfaceCapabilities );
}
- VkResult vkCreateHeadlessSurfaceEXT( VkInstance instance, const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetPhysicalDeviceSurfaceCapabilitiesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR* pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCreateHeadlessSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface );
+ return ::vkGetPhysicalDeviceSurfaceCapabilitiesKHR( physicalDevice, surface, pSurfaceCapabilities );
}
-#ifdef VK_USE_PLATFORM_IOS_MVK
- VkResult vkCreateIOSSurfaceMVK( VkInstance instance, const VkIOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetPhysicalDeviceSurfaceFormats2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VkSurfaceFormat2KHR* pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCreateIOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface );
+ return ::vkGetPhysicalDeviceSurfaceFormats2KHR( physicalDevice, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats );
}
-#endif /*VK_USE_PLATFORM_IOS_MVK*/
-#ifdef VK_USE_PLATFORM_FUCHSIA
- VkResult vkCreateImagePipeSurfaceFUCHSIA( VkInstance instance, const VkImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetPhysicalDeviceSurfaceFormatsKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pSurfaceFormatCount, VkSurfaceFormatKHR* pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCreateImagePipeSurfaceFUCHSIA( instance, pCreateInfo, pAllocator, pSurface );
+ return ::vkGetPhysicalDeviceSurfaceFormatsKHR( physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats );
}
-#endif /*VK_USE_PLATFORM_FUCHSIA*/
-#ifdef VK_USE_PLATFORM_MACOS_MVK
- VkResult vkCreateMacOSSurfaceMVK( VkInstance instance, const VkMacOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ VkResult vkGetPhysicalDeviceSurfacePresentModes2EXT( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCreateMacOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface );
+ return ::vkGetPhysicalDeviceSurfacePresentModes2EXT( physicalDevice, pSurfaceInfo, pPresentModeCount, pPresentModes );
}
-#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-#ifdef VK_USE_PLATFORM_METAL_EXT
- VkResult vkCreateMetalSurfaceEXT( VkInstance instance, const VkMetalSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetPhysicalDeviceSurfacePresentModesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCreateMetalSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface );
+ return ::vkGetPhysicalDeviceSurfacePresentModesKHR( physicalDevice, surface, pPresentModeCount, pPresentModes );
}
-#endif /*VK_USE_PLATFORM_METAL_EXT*/
-#ifdef VK_USE_PLATFORM_GGP
- VkResult vkCreateStreamDescriptorSurfaceGGP( VkInstance instance, const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetPhysicalDeviceSurfaceSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, VkBool32* pSupported ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCreateStreamDescriptorSurfaceGGP( instance, pCreateInfo, pAllocator, pSurface );
+ return ::vkGetPhysicalDeviceSurfaceSupportKHR( physicalDevice, queueFamilyIndex, surface, pSupported );
}
-#endif /*VK_USE_PLATFORM_GGP*/
-#ifdef VK_USE_PLATFORM_VI_NN
- VkResult vkCreateViSurfaceNN( VkInstance instance, const VkViSurfaceCreateInfoNN* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetPhysicalDeviceToolPropertiesEXT( VkPhysicalDevice physicalDevice, uint32_t* pToolCount, VkPhysicalDeviceToolPropertiesEXT* pToolProperties ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCreateViSurfaceNN( instance, pCreateInfo, pAllocator, pSurface );
+ return ::vkGetPhysicalDeviceToolPropertiesEXT( physicalDevice, pToolCount, pToolProperties );
}
-#endif /*VK_USE_PLATFORM_VI_NN*/
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
- VkResult vkCreateWaylandSurfaceKHR( VkInstance instance, const VkWaylandSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+ VkBool32 vkGetPhysicalDeviceWaylandPresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct wl_display* display ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCreateWaylandSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
+ return ::vkGetPhysicalDeviceWaylandPresentationSupportKHR( physicalDevice, queueFamilyIndex, display );
}
#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
- VkResult vkCreateWin32SurfaceKHR( VkInstance instance, const VkWin32SurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+ VkBool32 vkGetPhysicalDeviceWin32PresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCreateWin32SurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
+ return ::vkGetPhysicalDeviceWin32PresentationSupportKHR( physicalDevice, queueFamilyIndex );
}
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#ifdef VK_USE_PLATFORM_XCB_KHR
- VkResult vkCreateXcbSurfaceKHR( VkInstance instance, const VkXcbSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+ VkBool32 vkGetPhysicalDeviceXcbPresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCreateXcbSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
+ return ::vkGetPhysicalDeviceXcbPresentationSupportKHR( physicalDevice, queueFamilyIndex, connection, visual_id );
}
#endif /*VK_USE_PLATFORM_XCB_KHR*/
#ifdef VK_USE_PLATFORM_XLIB_KHR
- VkResult vkCreateXlibSurfaceKHR( VkInstance instance, const VkXlibSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+ VkBool32 vkGetPhysicalDeviceXlibPresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, Display* dpy, VisualID visualID ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCreateXlibSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
+ return ::vkGetPhysicalDeviceXlibPresentationSupportKHR( physicalDevice, queueFamilyIndex, dpy, visualID );
}
#endif /*VK_USE_PLATFORM_XLIB_KHR*/
- void vkDebugReportMessageEXT( VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage ) const VULKAN_HPP_NOEXCEPT
- {
- return ::vkDebugReportMessageEXT( instance, flags, objectType, object, location, messageCode, pLayerPrefix, pMessage );
- }
-
- void vkDestroyDebugReportCallbackEXT( VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
- {
- return ::vkDestroyDebugReportCallbackEXT( instance, callback, pAllocator );
- }
-
- void vkDestroyDebugUtilsMessengerEXT( VkInstance instance, VkDebugUtilsMessengerEXT messenger, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
- {
- return ::vkDestroyDebugUtilsMessengerEXT( instance, messenger, pAllocator );
- }
-
- void vkDestroyInstance( VkInstance instance, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetPipelineCacheData( VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkDestroyInstance( instance, pAllocator );
+ return ::vkGetPipelineCacheData( device, pipelineCache, pDataSize, pData );
}
- void vkDestroySurfaceKHR( VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetPipelineExecutableInternalRepresentationsKHR( VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkDestroySurfaceKHR( instance, surface, pAllocator );
+ return ::vkGetPipelineExecutableInternalRepresentationsKHR( device, pExecutableInfo, pInternalRepresentationCount, pInternalRepresentations );
}
- VkResult vkEnumeratePhysicalDeviceGroups( VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetPipelineExecutablePropertiesKHR( VkDevice device, const VkPipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, VkPipelineExecutablePropertiesKHR* pProperties ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkEnumeratePhysicalDeviceGroups( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties );
+ return ::vkGetPipelineExecutablePropertiesKHR( device, pPipelineInfo, pExecutableCount, pProperties );
}
- VkResult vkEnumeratePhysicalDeviceGroupsKHR( VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetPipelineExecutableStatisticsKHR( VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, VkPipelineExecutableStatisticKHR* pStatistics ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkEnumeratePhysicalDeviceGroupsKHR( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties );
+ return ::vkGetPipelineExecutableStatisticsKHR( device, pExecutableInfo, pStatisticCount, pStatistics );
}
- VkResult vkEnumeratePhysicalDevices( VkInstance instance, uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices ) const VULKAN_HPP_NOEXCEPT
+ void vkGetPrivateDataEXT( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlotEXT privateDataSlot, uint64_t* pData ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkEnumeratePhysicalDevices( instance, pPhysicalDeviceCount, pPhysicalDevices );
+ return ::vkGetPrivateDataEXT( device, objectType, objectHandle, privateDataSlot, pData );
}
- PFN_vkVoidFunction vkGetInstanceProcAddr( VkInstance instance, const char* pName ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetQueryPoolResults( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetInstanceProcAddr( instance, pName );
+ return ::vkGetQueryPoolResults( device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags );
}
- void vkSubmitDebugUtilsMessageEXT( VkInstance instance, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData ) const VULKAN_HPP_NOEXCEPT
+ void vkGetQueueCheckpointDataNV( VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointDataNV* pCheckpointData ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkSubmitDebugUtilsMessageEXT( instance, messageSeverity, messageTypes, pCallbackData );
+ return ::vkGetQueueCheckpointDataNV( queue, pCheckpointDataCount, pCheckpointData );
}
#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
- VkResult vkAcquireXlibDisplayEXT( VkPhysicalDevice physicalDevice, Display* dpy, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetRandROutputDisplayEXT( VkPhysicalDevice physicalDevice, Display* dpy, RROutput rrOutput, VkDisplayKHR* pDisplay ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkAcquireXlibDisplayEXT( physicalDevice, dpy, display );
+ return ::vkGetRandROutputDisplayEXT( physicalDevice, dpy, rrOutput, pDisplay );
}
#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
- VkResult vkCreateDevice( VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice ) const VULKAN_HPP_NOEXCEPT
- {
- return ::vkCreateDevice( physicalDevice, pCreateInfo, pAllocator, pDevice );
- }
-
- VkResult vkCreateDisplayModeKHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDisplayModeKHR* pMode ) const VULKAN_HPP_NOEXCEPT
- {
- return ::vkCreateDisplayModeKHR( physicalDevice, display, pCreateInfo, pAllocator, pMode );
- }
-
- VkResult vkEnumerateDeviceExtensionProperties( VkPhysicalDevice physicalDevice, const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties ) const VULKAN_HPP_NOEXCEPT
- {
- return ::vkEnumerateDeviceExtensionProperties( physicalDevice, pLayerName, pPropertyCount, pProperties );
- }
-
- VkResult vkEnumerateDeviceLayerProperties( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkLayerProperties* pProperties ) const VULKAN_HPP_NOEXCEPT
- {
- return ::vkEnumerateDeviceLayerProperties( physicalDevice, pPropertyCount, pProperties );
- }
-
- VkResult vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t* pCounterCount, VkPerformanceCounterKHR* pCounters, VkPerformanceCounterDescriptionKHR* pCounterDescriptions ) const VULKAN_HPP_NOEXCEPT
- {
- return ::vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( physicalDevice, queueFamilyIndex, pCounterCount, pCounters, pCounterDescriptions );
- }
-
- VkResult vkGetDisplayModeProperties2KHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModeProperties2KHR* pProperties ) const VULKAN_HPP_NOEXCEPT
- {
- return ::vkGetDisplayModeProperties2KHR( physicalDevice, display, pPropertyCount, pProperties );
- }
-
- VkResult vkGetDisplayModePropertiesKHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModePropertiesKHR* pProperties ) const VULKAN_HPP_NOEXCEPT
- {
- return ::vkGetDisplayModePropertiesKHR( physicalDevice, display, pPropertyCount, pProperties );
- }
-
- VkResult vkGetDisplayPlaneCapabilities2KHR( VkPhysicalDevice physicalDevice, const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo, VkDisplayPlaneCapabilities2KHR* pCapabilities ) const VULKAN_HPP_NOEXCEPT
- {
- return ::vkGetDisplayPlaneCapabilities2KHR( physicalDevice, pDisplayPlaneInfo, pCapabilities );
- }
-
- VkResult vkGetDisplayPlaneCapabilitiesKHR( VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR* pCapabilities ) const VULKAN_HPP_NOEXCEPT
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ VkResult vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetDisplayPlaneCapabilitiesKHR( physicalDevice, mode, planeIndex, pCapabilities );
+ return ::vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( device, pipeline, firstGroup, groupCount, dataSize, pData );
}
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- VkResult vkGetDisplayPlaneSupportedDisplaysKHR( VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t* pDisplayCount, VkDisplayKHR* pDisplays ) const VULKAN_HPP_NOEXCEPT
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ VkResult vkGetRayTracingShaderGroupHandlesKHR( VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetDisplayPlaneSupportedDisplaysKHR( physicalDevice, planeIndex, pDisplayCount, pDisplays );
- }
-
- VkResult vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( VkPhysicalDevice physicalDevice, uint32_t* pTimeDomainCount, VkTimeDomainEXT* pTimeDomains ) const VULKAN_HPP_NOEXCEPT
- {
- return ::vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( physicalDevice, pTimeDomainCount, pTimeDomains );
+ return ::vkGetRayTracingShaderGroupHandlesKHR( device, pipeline, firstGroup, groupCount, dataSize, pData );
}
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- VkResult vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkCooperativeMatrixPropertiesNV* pProperties ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetRayTracingShaderGroupHandlesNV( VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( physicalDevice, pPropertyCount, pProperties );
+ return ::vkGetRayTracingShaderGroupHandlesNV( device, pipeline, firstGroup, groupCount, dataSize, pData );
}
- VkResult vkGetPhysicalDeviceDisplayPlaneProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlaneProperties2KHR* pProperties ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetRefreshCycleDurationGOOGLE( VkDevice device, VkSwapchainKHR swapchain, VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceDisplayPlaneProperties2KHR( physicalDevice, pPropertyCount, pProperties );
+ return ::vkGetRefreshCycleDurationGOOGLE( device, swapchain, pDisplayTimingProperties );
}
- VkResult vkGetPhysicalDeviceDisplayPlanePropertiesKHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlanePropertiesKHR* pProperties ) const VULKAN_HPP_NOEXCEPT
+ void vkGetRenderAreaGranularity( VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceDisplayPlanePropertiesKHR( physicalDevice, pPropertyCount, pProperties );
+ return ::vkGetRenderAreaGranularity( device, renderPass, pGranularity );
}
- VkResult vkGetPhysicalDeviceDisplayProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayProperties2KHR* pProperties ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetSemaphoreCounterValue( VkDevice device, VkSemaphore semaphore, uint64_t* pValue ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceDisplayProperties2KHR( physicalDevice, pPropertyCount, pProperties );
+ return ::vkGetSemaphoreCounterValue( device, semaphore, pValue );
}
- VkResult vkGetPhysicalDeviceDisplayPropertiesKHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPropertiesKHR* pProperties ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetSemaphoreCounterValueKHR( VkDevice device, VkSemaphore semaphore, uint64_t* pValue ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceDisplayPropertiesKHR( physicalDevice, pPropertyCount, pProperties );
+ return ::vkGetSemaphoreCounterValueKHR( device, semaphore, pValue );
}
- void vkGetPhysicalDeviceExternalBufferProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetSemaphoreFdKHR( VkDevice device, const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceExternalBufferProperties( physicalDevice, pExternalBufferInfo, pExternalBufferProperties );
+ return ::vkGetSemaphoreFdKHR( device, pGetFdInfo, pFd );
}
- void vkGetPhysicalDeviceExternalBufferPropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties ) const VULKAN_HPP_NOEXCEPT
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ VkResult vkGetSemaphoreWin32HandleKHR( VkDevice device, const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceExternalBufferPropertiesKHR( physicalDevice, pExternalBufferInfo, pExternalBufferProperties );
+ return ::vkGetSemaphoreWin32HandleKHR( device, pGetWin32HandleInfo, pHandle );
}
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- void vkGetPhysicalDeviceExternalFenceProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetShaderInfoAMD( VkDevice device, VkPipeline pipeline, VkShaderStageFlagBits shaderStage, VkShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceExternalFenceProperties( physicalDevice, pExternalFenceInfo, pExternalFenceProperties );
+ return ::vkGetShaderInfoAMD( device, pipeline, shaderStage, infoType, pInfoSize, pInfo );
}
- void vkGetPhysicalDeviceExternalFencePropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetSwapchainCounterEXT( VkDevice device, VkSwapchainKHR swapchain, VkSurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceExternalFencePropertiesKHR( physicalDevice, pExternalFenceInfo, pExternalFenceProperties );
+ return ::vkGetSwapchainCounterEXT( device, swapchain, counter, pCounterValue );
}
- VkResult vkGetPhysicalDeviceExternalImageFormatPropertiesNV( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkExternalMemoryHandleTypeFlagsNV externalHandleType, VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetSwapchainImagesKHR( VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VkImage* pSwapchainImages ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceExternalImageFormatPropertiesNV( physicalDevice, format, type, tiling, usage, flags, externalHandleType, pExternalImageFormatProperties );
+ return ::vkGetSwapchainImagesKHR( device, swapchain, pSwapchainImageCount, pSwapchainImages );
}
- void vkGetPhysicalDeviceExternalSemaphoreProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetSwapchainStatusKHR( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceExternalSemaphoreProperties( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties );
+ return ::vkGetSwapchainStatusKHR( device, swapchain );
}
- void vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetValidationCacheDataEXT( VkDevice device, VkValidationCacheEXT validationCache, size_t* pDataSize, void* pData ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties );
+ return ::vkGetValidationCacheDataEXT( device, validationCache, pDataSize, pData );
}
- void vkGetPhysicalDeviceFeatures( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkImportFenceFdKHR( VkDevice device, const VkImportFenceFdInfoKHR* pImportFenceFdInfo ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceFeatures( physicalDevice, pFeatures );
+ return ::vkImportFenceFdKHR( device, pImportFenceFdInfo );
}
- void vkGetPhysicalDeviceFeatures2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures ) const VULKAN_HPP_NOEXCEPT
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ VkResult vkImportFenceWin32HandleKHR( VkDevice device, const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceFeatures2( physicalDevice, pFeatures );
+ return ::vkImportFenceWin32HandleKHR( device, pImportFenceWin32HandleInfo );
}
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- void vkGetPhysicalDeviceFeatures2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkImportSemaphoreFdKHR( VkDevice device, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceFeatures2KHR( physicalDevice, pFeatures );
+ return ::vkImportSemaphoreFdKHR( device, pImportSemaphoreFdInfo );
}
- void vkGetPhysicalDeviceFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties ) const VULKAN_HPP_NOEXCEPT
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ VkResult vkImportSemaphoreWin32HandleKHR( VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceFormatProperties( physicalDevice, format, pFormatProperties );
+ return ::vkImportSemaphoreWin32HandleKHR( device, pImportSemaphoreWin32HandleInfo );
}
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- void vkGetPhysicalDeviceFormatProperties2( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkInitializePerformanceApiINTEL( VkDevice device, const VkInitializePerformanceApiInfoINTEL* pInitializeInfo ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceFormatProperties2( physicalDevice, format, pFormatProperties );
+ return ::vkInitializePerformanceApiINTEL( device, pInitializeInfo );
}
- void vkGetPhysicalDeviceFormatProperties2KHR( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkInvalidateMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceFormatProperties2KHR( physicalDevice, format, pFormatProperties );
+ return ::vkInvalidateMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges );
}
- void vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( VkPhysicalDevice physicalDevice, VkDeviceGeneratedCommandsFeaturesNVX* pFeatures, VkDeviceGeneratedCommandsLimitsNVX* pLimits ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkMapMemory( VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( physicalDevice, pFeatures, pLimits );
+ return ::vkMapMemory( device, memory, offset, size, flags, ppData );
}
- VkResult vkGetPhysicalDeviceImageFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkMergePipelineCaches( VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceImageFormatProperties( physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties );
+ return ::vkMergePipelineCaches( device, dstCache, srcCacheCount, pSrcCaches );
}
- VkResult vkGetPhysicalDeviceImageFormatProperties2( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkMergeValidationCachesEXT( VkDevice device, VkValidationCacheEXT dstCache, uint32_t srcCacheCount, const VkValidationCacheEXT* pSrcCaches ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceImageFormatProperties2( physicalDevice, pImageFormatInfo, pImageFormatProperties );
+ return ::vkMergeValidationCachesEXT( device, dstCache, srcCacheCount, pSrcCaches );
}
- VkResult vkGetPhysicalDeviceImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT
+ void vkQueueBeginDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceImageFormatProperties2KHR( physicalDevice, pImageFormatInfo, pImageFormatProperties );
+ return ::vkQueueBeginDebugUtilsLabelEXT( queue, pLabelInfo );
}
- void vkGetPhysicalDeviceMemoryProperties( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties* pMemoryProperties ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkQueueBindSparse( VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceMemoryProperties( physicalDevice, pMemoryProperties );
+ return ::vkQueueBindSparse( queue, bindInfoCount, pBindInfo, fence );
}
- void vkGetPhysicalDeviceMemoryProperties2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties ) const VULKAN_HPP_NOEXCEPT
+ void vkQueueEndDebugUtilsLabelEXT( VkQueue queue ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceMemoryProperties2( physicalDevice, pMemoryProperties );
+ return ::vkQueueEndDebugUtilsLabelEXT( queue );
}
- void vkGetPhysicalDeviceMemoryProperties2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties ) const VULKAN_HPP_NOEXCEPT
+ void vkQueueInsertDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceMemoryProperties2KHR( physicalDevice, pMemoryProperties );
+ return ::vkQueueInsertDebugUtilsLabelEXT( queue, pLabelInfo );
}
- void vkGetPhysicalDeviceMultisamplePropertiesEXT( VkPhysicalDevice physicalDevice, VkSampleCountFlagBits samples, VkMultisamplePropertiesEXT* pMultisampleProperties ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkQueuePresentKHR( VkQueue queue, const VkPresentInfoKHR* pPresentInfo ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceMultisamplePropertiesEXT( physicalDevice, samples, pMultisampleProperties );
+ return ::vkQueuePresentKHR( queue, pPresentInfo );
}
- VkResult vkGetPhysicalDevicePresentRectanglesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pRectCount, VkRect2D* pRects ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkQueueSetPerformanceConfigurationINTEL( VkQueue queue, VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDevicePresentRectanglesKHR( physicalDevice, surface, pRectCount, pRects );
+ return ::vkQueueSetPerformanceConfigurationINTEL( queue, configuration );
}
- void vkGetPhysicalDeviceProperties( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties* pProperties ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkQueueSubmit( VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceProperties( physicalDevice, pProperties );
+ return ::vkQueueSubmit( queue, submitCount, pSubmits, fence );
}
- void vkGetPhysicalDeviceProperties2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkQueueWaitIdle( VkQueue queue ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceProperties2( physicalDevice, pProperties );
+ return ::vkQueueWaitIdle( queue );
}
- void vkGetPhysicalDeviceProperties2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkRegisterDeviceEventEXT( VkDevice device, const VkDeviceEventInfoEXT* pDeviceEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceProperties2KHR( physicalDevice, pProperties );
+ return ::vkRegisterDeviceEventEXT( device, pDeviceEventInfo, pAllocator, pFence );
}
- void vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( VkPhysicalDevice physicalDevice, const VkQueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, uint32_t* pNumPasses ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkRegisterDisplayEventEXT( VkDevice device, VkDisplayKHR display, const VkDisplayEventInfoEXT* pDisplayEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( physicalDevice, pPerformanceQueryCreateInfo, pNumPasses );
+ return ::vkRegisterDisplayEventEXT( device, display, pDisplayEventInfo, pAllocator, pFence );
}
- void vkGetPhysicalDeviceQueueFamilyProperties( VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties* pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkReleaseDisplayEXT( VkPhysicalDevice physicalDevice, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceQueueFamilyProperties( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties );
+ return ::vkReleaseDisplayEXT( physicalDevice, display );
}
- void vkGetPhysicalDeviceQueueFamilyProperties2( VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ VkResult vkReleaseFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceQueueFamilyProperties2( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties );
+ return ::vkReleaseFullScreenExclusiveModeEXT( device, swapchain );
}
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- void vkGetPhysicalDeviceQueueFamilyProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkReleasePerformanceConfigurationINTEL( VkDevice device, VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceQueueFamilyProperties2KHR( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties );
+ return ::vkReleasePerformanceConfigurationINTEL( device, configuration );
}
- void vkGetPhysicalDeviceSparseImageFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pPropertyCount, VkSparseImageFormatProperties* pProperties ) const VULKAN_HPP_NOEXCEPT
+ void vkReleaseProfilingLockKHR( VkDevice device ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceSparseImageFormatProperties( physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties );
+ return ::vkReleaseProfilingLockKHR( device );
}
- void vkGetPhysicalDeviceSparseImageFormatProperties2( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkResetCommandBuffer( VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceSparseImageFormatProperties2( physicalDevice, pFormatInfo, pPropertyCount, pProperties );
+ return ::vkResetCommandBuffer( commandBuffer, flags );
}
- void vkGetPhysicalDeviceSparseImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkResetCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceSparseImageFormatProperties2KHR( physicalDevice, pFormatInfo, pPropertyCount, pProperties );
+ return ::vkResetCommandPool( device, commandPool, flags );
}
- VkResult vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( VkPhysicalDevice physicalDevice, uint32_t* pCombinationCount, VkFramebufferMixedSamplesCombinationNV* pCombinations ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkResetDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( physicalDevice, pCombinationCount, pCombinations );
+ return ::vkResetDescriptorPool( device, descriptorPool, flags );
}
- VkResult vkGetPhysicalDeviceSurfaceCapabilities2EXT( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilities2EXT* pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkResetEvent( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceSurfaceCapabilities2EXT( physicalDevice, surface, pSurfaceCapabilities );
+ return ::vkResetEvent( device, event );
}
- VkResult vkGetPhysicalDeviceSurfaceCapabilities2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkSurfaceCapabilities2KHR* pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkResetFences( VkDevice device, uint32_t fenceCount, const VkFence* pFences ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceSurfaceCapabilities2KHR( physicalDevice, pSurfaceInfo, pSurfaceCapabilities );
+ return ::vkResetFences( device, fenceCount, pFences );
}
- VkResult vkGetPhysicalDeviceSurfaceCapabilitiesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR* pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT
+ void vkResetQueryPool( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceSurfaceCapabilitiesKHR( physicalDevice, surface, pSurfaceCapabilities );
+ return ::vkResetQueryPool( device, queryPool, firstQuery, queryCount );
}
- VkResult vkGetPhysicalDeviceSurfaceFormats2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VkSurfaceFormat2KHR* pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT
+ void vkResetQueryPoolEXT( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceSurfaceFormats2KHR( physicalDevice, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats );
+ return ::vkResetQueryPoolEXT( device, queryPool, firstQuery, queryCount );
}
- VkResult vkGetPhysicalDeviceSurfaceFormatsKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pSurfaceFormatCount, VkSurfaceFormatKHR* pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkSetDebugUtilsObjectNameEXT( VkDevice device, const VkDebugUtilsObjectNameInfoEXT* pNameInfo ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceSurfaceFormatsKHR( physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats );
+ return ::vkSetDebugUtilsObjectNameEXT( device, pNameInfo );
}
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- VkResult vkGetPhysicalDeviceSurfacePresentModes2EXT( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkSetDebugUtilsObjectTagEXT( VkDevice device, const VkDebugUtilsObjectTagInfoEXT* pTagInfo ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceSurfacePresentModes2EXT( physicalDevice, pSurfaceInfo, pPresentModeCount, pPresentModes );
+ return ::vkSetDebugUtilsObjectTagEXT( device, pTagInfo );
}
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- VkResult vkGetPhysicalDeviceSurfacePresentModesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkSetEvent( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceSurfacePresentModesKHR( physicalDevice, surface, pPresentModeCount, pPresentModes );
+ return ::vkSetEvent( device, event );
}
- VkResult vkGetPhysicalDeviceSurfaceSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, VkBool32* pSupported ) const VULKAN_HPP_NOEXCEPT
+ void vkSetHdrMetadataEXT( VkDevice device, uint32_t swapchainCount, const VkSwapchainKHR* pSwapchains, const VkHdrMetadataEXT* pMetadata ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceSurfaceSupportKHR( physicalDevice, queueFamilyIndex, surface, pSupported );
+ return ::vkSetHdrMetadataEXT( device, swapchainCount, pSwapchains, pMetadata );
}
- VkResult vkGetPhysicalDeviceToolPropertiesEXT( VkPhysicalDevice physicalDevice, uint32_t* pToolCount, VkPhysicalDeviceToolPropertiesEXT* pToolProperties ) const VULKAN_HPP_NOEXCEPT
+ void vkSetLocalDimmingAMD( VkDevice device, VkSwapchainKHR swapChain, VkBool32 localDimmingEnable ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceToolPropertiesEXT( physicalDevice, pToolCount, pToolProperties );
+ return ::vkSetLocalDimmingAMD( device, swapChain, localDimmingEnable );
}
-#ifdef VK_USE_PLATFORM_WAYLAND_KHR
- VkBool32 vkGetPhysicalDeviceWaylandPresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct wl_display* display ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkSetPrivateDataEXT( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlotEXT privateDataSlot, uint64_t data ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceWaylandPresentationSupportKHR( physicalDevice, queueFamilyIndex, display );
+ return ::vkSetPrivateDataEXT( device, objectType, objectHandle, privateDataSlot, data );
}
-#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- VkBool32 vkGetPhysicalDeviceWin32PresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkSignalSemaphore( VkDevice device, const VkSemaphoreSignalInfo* pSignalInfo ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceWin32PresentationSupportKHR( physicalDevice, queueFamilyIndex );
+ return ::vkSignalSemaphore( device, pSignalInfo );
}
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-#ifdef VK_USE_PLATFORM_XCB_KHR
- VkBool32 vkGetPhysicalDeviceXcbPresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkSignalSemaphoreKHR( VkDevice device, const VkSemaphoreSignalInfo* pSignalInfo ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceXcbPresentationSupportKHR( physicalDevice, queueFamilyIndex, connection, visual_id );
+ return ::vkSignalSemaphoreKHR( device, pSignalInfo );
}
-#endif /*VK_USE_PLATFORM_XCB_KHR*/
-#ifdef VK_USE_PLATFORM_XLIB_KHR
- VkBool32 vkGetPhysicalDeviceXlibPresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, Display* dpy, VisualID visualID ) const VULKAN_HPP_NOEXCEPT
+ void vkSubmitDebugUtilsMessageEXT( VkInstance instance, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceXlibPresentationSupportKHR( physicalDevice, queueFamilyIndex, dpy, visualID );
+ return ::vkSubmitDebugUtilsMessageEXT( instance, messageSeverity, messageTypes, pCallbackData );
}
-#endif /*VK_USE_PLATFORM_XLIB_KHR*/
-#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
- VkResult vkGetRandROutputDisplayEXT( VkPhysicalDevice physicalDevice, Display* dpy, RROutput rrOutput, VkDisplayKHR* pDisplay ) const VULKAN_HPP_NOEXCEPT
+ void vkTrimCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetRandROutputDisplayEXT( physicalDevice, dpy, rrOutput, pDisplay );
+ return ::vkTrimCommandPool( device, commandPool, flags );
}
-#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
- VkResult vkReleaseDisplayEXT( VkPhysicalDevice physicalDevice, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT
+ void vkTrimCommandPoolKHR( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkReleaseDisplayEXT( physicalDevice, display );
+ return ::vkTrimCommandPoolKHR( device, commandPool, flags );
}
- void vkGetQueueCheckpointDataNV( VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointDataNV* pCheckpointData ) const VULKAN_HPP_NOEXCEPT
+ void vkUninitializePerformanceApiINTEL( VkDevice device ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetQueueCheckpointDataNV( queue, pCheckpointDataCount, pCheckpointData );
+ return ::vkUninitializePerformanceApiINTEL( device );
}
- void vkQueueBeginDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo ) const VULKAN_HPP_NOEXCEPT
+ void vkUnmapMemory( VkDevice device, VkDeviceMemory memory ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkQueueBeginDebugUtilsLabelEXT( queue, pLabelInfo );
+ return ::vkUnmapMemory( device, memory );
}
- VkResult vkQueueBindSparse( VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence ) const VULKAN_HPP_NOEXCEPT
+ void vkUpdateDescriptorSetWithTemplate( VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkQueueBindSparse( queue, bindInfoCount, pBindInfo, fence );
+ return ::vkUpdateDescriptorSetWithTemplate( device, descriptorSet, descriptorUpdateTemplate, pData );
}
- void vkQueueEndDebugUtilsLabelEXT( VkQueue queue ) const VULKAN_HPP_NOEXCEPT
+ void vkUpdateDescriptorSetWithTemplateKHR( VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkQueueEndDebugUtilsLabelEXT( queue );
+ return ::vkUpdateDescriptorSetWithTemplateKHR( device, descriptorSet, descriptorUpdateTemplate, pData );
}
- void vkQueueInsertDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo ) const VULKAN_HPP_NOEXCEPT
+ void vkUpdateDescriptorSets( VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VkCopyDescriptorSet* pDescriptorCopies ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkQueueInsertDebugUtilsLabelEXT( queue, pLabelInfo );
+ return ::vkUpdateDescriptorSets( device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies );
}
- VkResult vkQueuePresentKHR( VkQueue queue, const VkPresentInfoKHR* pPresentInfo ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkWaitForFences( VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkQueuePresentKHR( queue, pPresentInfo );
+ return ::vkWaitForFences( device, fenceCount, pFences, waitAll, timeout );
}
- VkResult vkQueueSetPerformanceConfigurationINTEL( VkQueue queue, VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkWaitSemaphores( VkDevice device, const VkSemaphoreWaitInfo* pWaitInfo, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkQueueSetPerformanceConfigurationINTEL( queue, configuration );
+ return ::vkWaitSemaphores( device, pWaitInfo, timeout );
}
- VkResult vkQueueSubmit( VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkWaitSemaphoresKHR( VkDevice device, const VkSemaphoreWaitInfo* pWaitInfo, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkQueueSubmit( queue, submitCount, pSubmits, fence );
+ return ::vkWaitSemaphoresKHR( device, pWaitInfo, timeout );
}
- VkResult vkQueueWaitIdle( VkQueue queue ) const VULKAN_HPP_NOEXCEPT
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ VkResult vkWriteAccelerationStructuresPropertiesKHR( VkDevice device, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, size_t dataSize, void* pData, size_t stride ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkQueueWaitIdle( queue );
+ return ::vkWriteAccelerationStructuresPropertiesKHR( device, accelerationStructureCount, pAccelerationStructures, queryType, dataSize, pData, stride );
}
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
};
#endif
@@ -2783,11 +3533,21 @@ namespace VULKAN_HPP_NAMESPACE
# endif
#endif
+#if defined(_WIN32) && defined(VULKAN_HPP_STORAGE_SHARED)
+# ifdef VULKAN_HPP_STORAGE_SHARED_EXPORT
+# define VULKAN_HPP_STORAGE_API __declspec( dllexport )
+# else
+# define VULKAN_HPP_STORAGE_API __declspec( dllimport )
+# endif
+#else
+# define VULKAN_HPP_STORAGE_API
+#endif
+
#if !defined(VULKAN_HPP_DEFAULT_DISPATCHER)
# if VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1
# define VULKAN_HPP_DEFAULT_DISPATCHER ::VULKAN_HPP_NAMESPACE::defaultDispatchLoaderDynamic
-# define VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE namespace VULKAN_HPP_NAMESPACE { DispatchLoaderDynamic defaultDispatchLoaderDynamic; }
- extern DispatchLoaderDynamic defaultDispatchLoaderDynamic;
+# define VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_STORAGE_API DispatchLoaderDynamic defaultDispatchLoaderDynamic; }
+ extern VULKAN_HPP_STORAGE_API DispatchLoaderDynamic defaultDispatchLoaderDynamic;
# else
# define VULKAN_HPP_DEFAULT_DISPATCHER ::VULKAN_HPP_NAMESPACE::DispatchLoaderStatic()
# define VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE
@@ -2802,23 +3562,32 @@ namespace VULKAN_HPP_NAMESPACE
# endif
#endif
+#if defined( VULKAN_HPP_NO_DEFAULT_DISPATCHER )
+# define VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT
+# define VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT
+# define VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT
+#else
+# define VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT = {}
+# define VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT = nullptr
+# define VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT = VULKAN_HPP_DEFAULT_DISPATCHER
+#endif
+
struct AllocationCallbacks;
template <typename OwnerType, typename Dispatch>
class ObjectDestroy
{
public:
- ObjectDestroy()
- : m_owner()
- , m_allocationCallbacks( nullptr )
- , m_dispatch( nullptr )
- {}
+ ObjectDestroy() = default;
- ObjectDestroy( OwnerType owner, Optional<const AllocationCallbacks> allocationCallbacks = nullptr, Dispatch const &dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT
- : m_owner( owner )
- , m_allocationCallbacks( allocationCallbacks )
- , m_dispatch( &dispatch )
- {}
+ ObjectDestroy( OwnerType owner,
+ Optional<const AllocationCallbacks> allocationCallbacks
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT
+ : m_owner( owner )
+ , m_allocationCallbacks( allocationCallbacks )
+ , m_dispatch( &dispatch )
+ {}
OwnerType getOwner() const VULKAN_HPP_NOEXCEPT { return m_owner; }
Optional<const AllocationCallbacks> getAllocator() const VULKAN_HPP_NOEXCEPT { return m_allocationCallbacks; }
@@ -2827,14 +3596,14 @@ namespace VULKAN_HPP_NAMESPACE
template <typename T>
void destroy(T t) VULKAN_HPP_NOEXCEPT
{
- assert( m_owner && m_dispatch );
+ VULKAN_HPP_ASSERT( m_owner && m_dispatch );
m_owner.destroy( t, m_allocationCallbacks, *m_dispatch );
}
private:
- OwnerType m_owner;
- Optional<const AllocationCallbacks> m_allocationCallbacks;
- Dispatch const* m_dispatch;
+ OwnerType m_owner = {};
+ Optional<const AllocationCallbacks> m_allocationCallbacks = nullptr;
+ Dispatch const * m_dispatch = nullptr;
};
class NoParent;
@@ -2843,15 +3612,13 @@ namespace VULKAN_HPP_NAMESPACE
class ObjectDestroy<NoParent,Dispatch>
{
public:
- ObjectDestroy()
- : m_allocationCallbacks( nullptr )
- , m_dispatch( nullptr )
- {}
+ ObjectDestroy() = default;
- ObjectDestroy( Optional<const AllocationCallbacks> allocationCallbacks, Dispatch const &dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT
- : m_allocationCallbacks( allocationCallbacks )
- , m_dispatch( &dispatch )
- {}
+ ObjectDestroy( Optional<const AllocationCallbacks> allocationCallbacks,
+ Dispatch const & dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT
+ : m_allocationCallbacks( allocationCallbacks )
+ , m_dispatch( &dispatch )
+ {}
Optional<const AllocationCallbacks> getAllocator() const VULKAN_HPP_NOEXCEPT { return m_allocationCallbacks; }
@@ -2859,53 +3626,62 @@ namespace VULKAN_HPP_NAMESPACE
template <typename T>
void destroy(T t) VULKAN_HPP_NOEXCEPT
{
- assert( m_dispatch );
+ VULKAN_HPP_ASSERT( m_dispatch );
t.destroy( m_allocationCallbacks, *m_dispatch );
}
private:
- Optional<const AllocationCallbacks> m_allocationCallbacks;
- Dispatch const* m_dispatch;
+ Optional<const AllocationCallbacks> m_allocationCallbacks = nullptr;
+ Dispatch const * m_dispatch = nullptr;
};
template <typename OwnerType, typename Dispatch>
class ObjectFree
{
- public:
- ObjectFree()
- : m_owner()
- , m_allocationCallbacks( nullptr )
- , m_dispatch( nullptr )
- {}
+ public:
+ ObjectFree() = default;
- ObjectFree( OwnerType owner, Optional<const AllocationCallbacks> allocationCallbacks, Dispatch const &dispatch ) VULKAN_HPP_NOEXCEPT
- : m_owner( owner )
- , m_allocationCallbacks( allocationCallbacks )
- , m_dispatch( &dispatch )
- {}
+ ObjectFree( OwnerType owner,
+ Optional<const AllocationCallbacks> allocationCallbacks VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT
+ : m_owner( owner )
+ , m_allocationCallbacks( allocationCallbacks )
+ , m_dispatch( &dispatch )
+ {}
- OwnerType getOwner() const VULKAN_HPP_NOEXCEPT { return m_owner; }
- Optional<const AllocationCallbacks> getAllocator() const VULKAN_HPP_NOEXCEPT { return m_allocationCallbacks; }
+ OwnerType getOwner() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_owner;
+ }
- protected:
- template <typename T>
- void destroy(T t) VULKAN_HPP_NOEXCEPT
- {
- assert( m_owner && m_dispatch );
- m_owner.free( t, m_allocationCallbacks, *m_dispatch );
- }
+ Optional<const AllocationCallbacks> getAllocator() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_allocationCallbacks;
+ }
- private:
- OwnerType m_owner;
- Optional<const AllocationCallbacks> m_allocationCallbacks;
- Dispatch const* m_dispatch;
+ protected:
+ template <typename T>
+ void destroy( T t ) VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( m_owner && m_dispatch );
+ m_owner.free( t, m_allocationCallbacks, *m_dispatch );
+ }
+
+ private:
+ OwnerType m_owner = {};
+ Optional<const AllocationCallbacks> m_allocationCallbacks = nullptr;
+ Dispatch const * m_dispatch = nullptr;
};
template <typename OwnerType, typename PoolType, typename Dispatch>
class PoolFree
{
public:
- PoolFree( OwnerType owner = OwnerType(), PoolType pool = PoolType(), Dispatch const &dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT
+ PoolFree() = default;
+
+ PoolFree( OwnerType owner,
+ PoolType pool,
+ Dispatch const & dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT
: m_owner( owner )
, m_pool( pool )
, m_dispatch( &dispatch )
@@ -2922,97 +3698,83 @@ namespace VULKAN_HPP_NAMESPACE
}
private:
- OwnerType m_owner;
- PoolType m_pool;
- Dispatch const* m_dispatch;
+ OwnerType m_owner = OwnerType();
+ PoolType m_pool = PoolType();
+ Dispatch const * m_dispatch = nullptr;
};
- template <typename T, size_t N, size_t I>
- class ConstExpression1DArrayCopy
- {
- public:
- VULKAN_HPP_CONSTEXPR_14 static void copy(T dst[N], std::array<T,N> const& src) VULKAN_HPP_NOEXCEPT
- {
- dst[I-1] = src[I-1];
- ConstExpression1DArrayCopy<T, N, I - 1>::copy(dst, src);
- }
- };
+ using Bool32 = uint32_t;
+ using DeviceAddress = uint64_t;
+ using DeviceSize = uint64_t;
+ using SampleMask = uint32_t;
- template <typename T, size_t N>
- class ConstExpression1DArrayCopy<T, N, 0>
+ template <typename EnumType, EnumType value>
+ struct CppType
+ {};
+
+ template <typename Type>
+ struct isVulkanHandleType
{
- public:
- VULKAN_HPP_CONSTEXPR_14 static void copy(T /*dst*/[N], std::array<T,N> const& /*src*/) VULKAN_HPP_NOEXCEPT {}
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = false;
};
- template <typename T, size_t N, size_t M, size_t I, size_t J>
- class ConstExpression2DArrayCopy
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ enum class AccelerationStructureBuildTypeKHR
{
- public:
- VULKAN_HPP_CONSTEXPR_14 static void copy(T dst[N][M], std::array<std::array<T,M>, N> const& src) VULKAN_HPP_NOEXCEPT
- {
- dst[I - 1][J - 1] = src[I - 1][J - 1];
- ConstExpression2DArrayCopy<T, N, M, I, J - 1>::copy(dst, src);
- }
+ eHost = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_KHR,
+ eDevice = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR,
+ eHostOrDevice = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_OR_DEVICE_KHR
};
- template <typename T, size_t N, size_t M, size_t I>
- class ConstExpression2DArrayCopy<T, N, M, I, 0>
+ VULKAN_HPP_INLINE std::string to_string( AccelerationStructureBuildTypeKHR value )
{
- public:
- VULKAN_HPP_CONSTEXPR_14 static void copy(T dst[N][M], std::array<std::array<T, M>, N> const& src) VULKAN_HPP_NOEXCEPT
+ switch ( value )
{
- ConstExpression2DArrayCopy<T, N, M, I - 1, M>::copy(dst, src);
+ case AccelerationStructureBuildTypeKHR::eHost : return "Host";
+ case AccelerationStructureBuildTypeKHR::eDevice : return "Device";
+ case AccelerationStructureBuildTypeKHR::eHostOrDevice : return "HostOrDevice";
+ default: return "invalid";
}
- };
-
- template <typename T, size_t N, size_t M>
- class ConstExpression2DArrayCopy<T, N, M, 0, 0>
- {
- public:
- VULKAN_HPP_CONSTEXPR_14 static void copy(T /*dst*/[N][M], std::array<std::array<T, M>, N> const& /*src*/) VULKAN_HPP_NOEXCEPT {}
- };
-
- using Bool32 = uint32_t;
- using DeviceAddress = uint64_t;
- using DeviceSize = uint64_t;
- using SampleMask = uint32_t;
+ }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- enum class AccelerationStructureMemoryRequirementsTypeNV
+ enum class AccelerationStructureMemoryRequirementsTypeKHR
{
- eObject = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV,
- eBuildScratch = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV,
- eUpdateScratch = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV
+ eObject = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_KHR,
+ eBuildScratch = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_KHR,
+ eUpdateScratch = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_KHR
};
+ using AccelerationStructureMemoryRequirementsTypeNV = AccelerationStructureMemoryRequirementsTypeKHR;
- VULKAN_HPP_INLINE std::string to_string( AccelerationStructureMemoryRequirementsTypeNV value )
+ VULKAN_HPP_INLINE std::string to_string( AccelerationStructureMemoryRequirementsTypeKHR value )
{
switch ( value )
{
- case AccelerationStructureMemoryRequirementsTypeNV::eObject : return "Object";
- case AccelerationStructureMemoryRequirementsTypeNV::eBuildScratch : return "BuildScratch";
- case AccelerationStructureMemoryRequirementsTypeNV::eUpdateScratch : return "UpdateScratch";
+ case AccelerationStructureMemoryRequirementsTypeKHR::eObject : return "Object";
+ case AccelerationStructureMemoryRequirementsTypeKHR::eBuildScratch : return "BuildScratch";
+ case AccelerationStructureMemoryRequirementsTypeKHR::eUpdateScratch : return "UpdateScratch";
default: return "invalid";
}
}
- enum class AccelerationStructureTypeNV
+ enum class AccelerationStructureTypeKHR
{
- eTopLevel = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV,
- eBottomLevel = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV
+ eTopLevel = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR,
+ eBottomLevel = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR
};
+ using AccelerationStructureTypeNV = AccelerationStructureTypeKHR;
- VULKAN_HPP_INLINE std::string to_string( AccelerationStructureTypeNV value )
+ VULKAN_HPP_INLINE std::string to_string( AccelerationStructureTypeKHR value )
{
switch ( value )
{
- case AccelerationStructureTypeNV::eTopLevel : return "TopLevel";
- case AccelerationStructureTypeNV::eBottomLevel : return "BottomLevel";
+ case AccelerationStructureTypeKHR::eTopLevel : return "TopLevel";
+ case AccelerationStructureTypeKHR::eBottomLevel : return "BottomLevel";
default: return "invalid";
}
}
- enum class AccessFlagBits
+ enum class AccessFlagBits : VkAccessFlags
{
eIndirectCommandRead = VK_ACCESS_INDIRECT_COMMAND_READ_BIT,
eIndexRead = VK_ACCESS_INDEX_READ_BIT,
@@ -3035,13 +3797,15 @@ namespace VULKAN_HPP_NAMESPACE
eTransformFeedbackCounterReadEXT = VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT,
eTransformFeedbackCounterWriteEXT = VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT,
eConditionalRenderingReadEXT = VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT,
- eCommandProcessReadNVX = VK_ACCESS_COMMAND_PROCESS_READ_BIT_NVX,
- eCommandProcessWriteNVX = VK_ACCESS_COMMAND_PROCESS_WRITE_BIT_NVX,
eColorAttachmentReadNoncoherentEXT = VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT,
+ eAccelerationStructureReadKHR = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR,
+ eAccelerationStructureWriteKHR = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR,
eShadingRateImageReadNV = VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV,
+ eFragmentDensityMapReadEXT = VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT,
+ eCommandPreprocessReadNV = VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_NV,
+ eCommandPreprocessWriteNV = VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_NV,
eAccelerationStructureReadNV = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV,
- eAccelerationStructureWriteNV = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV,
- eFragmentDensityMapReadEXT = VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT
+ eAccelerationStructureWriteNV = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV
};
VULKAN_HPP_INLINE std::string to_string( AccessFlagBits value )
@@ -3069,18 +3833,18 @@ namespace VULKAN_HPP_NAMESPACE
case AccessFlagBits::eTransformFeedbackCounterReadEXT : return "TransformFeedbackCounterReadEXT";
case AccessFlagBits::eTransformFeedbackCounterWriteEXT : return "TransformFeedbackCounterWriteEXT";
case AccessFlagBits::eConditionalRenderingReadEXT : return "ConditionalRenderingReadEXT";
- case AccessFlagBits::eCommandProcessReadNVX : return "CommandProcessReadNVX";
- case AccessFlagBits::eCommandProcessWriteNVX : return "CommandProcessWriteNVX";
case AccessFlagBits::eColorAttachmentReadNoncoherentEXT : return "ColorAttachmentReadNoncoherentEXT";
+ case AccessFlagBits::eAccelerationStructureReadKHR : return "AccelerationStructureReadKHR";
+ case AccessFlagBits::eAccelerationStructureWriteKHR : return "AccelerationStructureWriteKHR";
case AccessFlagBits::eShadingRateImageReadNV : return "ShadingRateImageReadNV";
- case AccessFlagBits::eAccelerationStructureReadNV : return "AccelerationStructureReadNV";
- case AccessFlagBits::eAccelerationStructureWriteNV : return "AccelerationStructureWriteNV";
case AccessFlagBits::eFragmentDensityMapReadEXT : return "FragmentDensityMapReadEXT";
+ case AccessFlagBits::eCommandPreprocessReadNV : return "CommandPreprocessReadNV";
+ case AccessFlagBits::eCommandPreprocessWriteNV : return "CommandPreprocessWriteNV";
default: return "invalid";
}
}
- enum class AcquireProfilingLockFlagBitsKHR
+ enum class AcquireProfilingLockFlagBitsKHR : VkAcquireProfilingLockFlagsKHR
{};
VULKAN_HPP_INLINE std::string to_string( AcquireProfilingLockFlagBitsKHR )
@@ -3088,7 +3852,7 @@ namespace VULKAN_HPP_NAMESPACE
return "(void)";
}
- enum class AttachmentDescriptionFlagBits
+ enum class AttachmentDescriptionFlagBits : VkAttachmentDescriptionFlags
{
eMayAlias = VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT
};
@@ -3123,7 +3887,8 @@ namespace VULKAN_HPP_NAMESPACE
enum class AttachmentStoreOp
{
eStore = VK_ATTACHMENT_STORE_OP_STORE,
- eDontCare = VK_ATTACHMENT_STORE_OP_DONT_CARE
+ eDontCare = VK_ATTACHMENT_STORE_OP_DONT_CARE,
+ eNoneQCOM = VK_ATTACHMENT_STORE_OP_NONE_QCOM
};
VULKAN_HPP_INLINE std::string to_string( AttachmentStoreOp value )
@@ -3132,6 +3897,7 @@ namespace VULKAN_HPP_NAMESPACE
{
case AttachmentStoreOp::eStore : return "Store";
case AttachmentStoreOp::eDontCare : return "DontCare";
+ case AttachmentStoreOp::eNoneQCOM : return "NoneQCOM";
default: return "invalid";
}
}
@@ -3325,7 +4091,9 @@ namespace VULKAN_HPP_NAMESPACE
eFloatOpaqueBlack = VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK,
eIntOpaqueBlack = VK_BORDER_COLOR_INT_OPAQUE_BLACK,
eFloatOpaqueWhite = VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE,
- eIntOpaqueWhite = VK_BORDER_COLOR_INT_OPAQUE_WHITE
+ eIntOpaqueWhite = VK_BORDER_COLOR_INT_OPAQUE_WHITE,
+ eFloatCustomEXT = VK_BORDER_COLOR_FLOAT_CUSTOM_EXT,
+ eIntCustomEXT = VK_BORDER_COLOR_INT_CUSTOM_EXT
};
VULKAN_HPP_INLINE std::string to_string( BorderColor value )
@@ -3338,11 +4106,13 @@ namespace VULKAN_HPP_NAMESPACE
case BorderColor::eIntOpaqueBlack : return "IntOpaqueBlack";
case BorderColor::eFloatOpaqueWhite : return "FloatOpaqueWhite";
case BorderColor::eIntOpaqueWhite : return "IntOpaqueWhite";
+ case BorderColor::eFloatCustomEXT : return "FloatCustomEXT";
+ case BorderColor::eIntCustomEXT : return "IntCustomEXT";
default: return "invalid";
}
}
- enum class BufferCreateFlagBits
+ enum class BufferCreateFlagBits : VkBufferCreateFlags
{
eSparseBinding = VK_BUFFER_CREATE_SPARSE_BINDING_BIT,
eSparseResidency = VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT,
@@ -3366,7 +4136,7 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class BufferUsageFlagBits
+ enum class BufferUsageFlagBits : VkBufferUsageFlags
{
eTransferSrc = VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
eTransferDst = VK_BUFFER_USAGE_TRANSFER_DST_BIT,
@@ -3381,6 +4151,7 @@ namespace VULKAN_HPP_NAMESPACE
eTransformFeedbackBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT,
eTransformFeedbackCounterBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT,
eConditionalRenderingEXT = VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT,
+ eRayTracingKHR = VK_BUFFER_USAGE_RAY_TRACING_BIT_KHR,
eRayTracingNV = VK_BUFFER_USAGE_RAY_TRACING_BIT_NV,
eShaderDeviceAddressEXT = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT,
eShaderDeviceAddressKHR = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR
@@ -3403,37 +4174,30 @@ namespace VULKAN_HPP_NAMESPACE
case BufferUsageFlagBits::eTransformFeedbackBufferEXT : return "TransformFeedbackBufferEXT";
case BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT : return "TransformFeedbackCounterBufferEXT";
case BufferUsageFlagBits::eConditionalRenderingEXT : return "ConditionalRenderingEXT";
- case BufferUsageFlagBits::eRayTracingNV : return "RayTracingNV";
+ case BufferUsageFlagBits::eRayTracingKHR : return "RayTracingKHR";
default: return "invalid";
}
}
- enum class BufferViewCreateFlagBits
- {};
-
- VULKAN_HPP_INLINE std::string to_string( BufferViewCreateFlagBits )
+ enum class BuildAccelerationStructureFlagBitsKHR : VkBuildAccelerationStructureFlagsKHR
{
- return "(void)";
- }
-
- enum class BuildAccelerationStructureFlagBitsNV
- {
- eAllowUpdate = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV,
- eAllowCompaction = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV,
- ePreferFastTrace = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV,
- ePreferFastBuild = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_NV,
- eLowMemory = VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_NV
+ eAllowUpdate = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR,
+ eAllowCompaction = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR,
+ ePreferFastTrace = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR,
+ ePreferFastBuild = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR,
+ eLowMemory = VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR
};
+ using BuildAccelerationStructureFlagBitsNV = BuildAccelerationStructureFlagBitsKHR;
- VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureFlagBitsNV value )
+ VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureFlagBitsKHR value )
{
switch ( value )
{
- case BuildAccelerationStructureFlagBitsNV::eAllowUpdate : return "AllowUpdate";
- case BuildAccelerationStructureFlagBitsNV::eAllowCompaction : return "AllowCompaction";
- case BuildAccelerationStructureFlagBitsNV::ePreferFastTrace : return "PreferFastTrace";
- case BuildAccelerationStructureFlagBitsNV::ePreferFastBuild : return "PreferFastBuild";
- case BuildAccelerationStructureFlagBitsNV::eLowMemory : return "LowMemory";
+ case BuildAccelerationStructureFlagBitsKHR::eAllowUpdate : return "AllowUpdate";
+ case BuildAccelerationStructureFlagBitsKHR::eAllowCompaction : return "AllowCompaction";
+ case BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace : return "PreferFastTrace";
+ case BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild : return "PreferFastBuild";
+ case BuildAccelerationStructureFlagBitsKHR::eLowMemory : return "LowMemory";
default: return "invalid";
}
}
@@ -3475,7 +4239,7 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class ColorComponentFlagBits
+ enum class ColorComponentFlagBits : VkColorComponentFlags
{
eR = VK_COLOR_COMPONENT_R_BIT,
eG = VK_COLOR_COMPONENT_G_BIT,
@@ -3557,7 +4321,7 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class CommandBufferResetFlagBits
+ enum class CommandBufferResetFlagBits : VkCommandBufferResetFlags
{
eReleaseResources = VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT
};
@@ -3571,7 +4335,7 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class CommandBufferUsageFlagBits
+ enum class CommandBufferUsageFlagBits : VkCommandBufferUsageFlags
{
eOneTimeSubmit = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
eRenderPassContinue = VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT,
@@ -3589,7 +4353,7 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class CommandPoolCreateFlagBits
+ enum class CommandPoolCreateFlagBits : VkCommandPoolCreateFlags
{
eTransient = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT,
eResetCommandBuffer = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
@@ -3607,7 +4371,7 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class CommandPoolResetFlagBits
+ enum class CommandPoolResetFlagBits : VkCommandPoolResetFlags
{
eReleaseResources = VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT
};
@@ -3709,7 +4473,7 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class CompositeAlphaFlagBitsKHR
+ enum class CompositeAlphaFlagBitsKHR : VkCompositeAlphaFlagsKHR
{
eOpaque = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR,
ePreMultiplied = VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR,
@@ -3729,7 +4493,7 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class ConditionalRenderingFlagBitsEXT
+ enum class ConditionalRenderingFlagBitsEXT : VkConditionalRenderingFlagsEXT
{
eInverted = VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT
};
@@ -3761,18 +4525,23 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class CopyAccelerationStructureModeNV
+ enum class CopyAccelerationStructureModeKHR
{
- eClone = VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV,
- eCompact = VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV
+ eClone = VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR,
+ eCompact = VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR,
+ eSerialize = VK_COPY_ACCELERATION_STRUCTURE_MODE_SERIALIZE_KHR,
+ eDeserialize = VK_COPY_ACCELERATION_STRUCTURE_MODE_DESERIALIZE_KHR
};
+ using CopyAccelerationStructureModeNV = CopyAccelerationStructureModeKHR;
- VULKAN_HPP_INLINE std::string to_string( CopyAccelerationStructureModeNV value )
+ VULKAN_HPP_INLINE std::string to_string( CopyAccelerationStructureModeKHR value )
{
switch ( value )
{
- case CopyAccelerationStructureModeNV::eClone : return "Clone";
- case CopyAccelerationStructureModeNV::eCompact : return "Compact";
+ case CopyAccelerationStructureModeKHR::eClone : return "Clone";
+ case CopyAccelerationStructureModeKHR::eCompact : return "Compact";
+ case CopyAccelerationStructureModeKHR::eSerialize : return "Serialize";
+ case CopyAccelerationStructureModeKHR::eDeserialize : return "Deserialize";
default: return "invalid";
}
}
@@ -3813,7 +4582,7 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class CullModeFlagBits
+ enum class CullModeFlagBits : VkCullModeFlags
{
eNone = VK_CULL_MODE_NONE,
eFront = VK_CULL_MODE_FRONT_BIT,
@@ -3833,7 +4602,7 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class DebugReportFlagBitsEXT
+ enum class DebugReportFlagBitsEXT : VkDebugReportFlagsEXT
{
eInformation = VK_DEBUG_REPORT_INFORMATION_BIT_EXT,
eWarning = VK_DEBUG_REPORT_WARNING_BIT_EXT,
@@ -3888,16 +4657,15 @@ namespace VULKAN_HPP_NAMESPACE
eDebugReportCallbackEXT = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT,
eDisplayKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT,
eDisplayModeKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT,
- eObjectTableNVX = VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT,
- eIndirectCommandsLayoutNVX = VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT,
eValidationCacheEXT = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT,
eSamplerYcbcrConversion = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT,
eDescriptorUpdateTemplate = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT,
+ eAccelerationStructureKHR = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR_EXT,
eAccelerationStructureNV = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT,
eDebugReport = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT,
- eValidationCache = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT,
eDescriptorUpdateTemplateKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT,
- eSamplerYcbcrConversionKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT
+ eSamplerYcbcrConversionKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT,
+ eValidationCache = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT
};
VULKAN_HPP_INLINE std::string to_string( DebugReportObjectTypeEXT value )
@@ -3935,17 +4703,15 @@ namespace VULKAN_HPP_NAMESPACE
case DebugReportObjectTypeEXT::eDebugReportCallbackEXT : return "DebugReportCallbackEXT";
case DebugReportObjectTypeEXT::eDisplayKHR : return "DisplayKHR";
case DebugReportObjectTypeEXT::eDisplayModeKHR : return "DisplayModeKHR";
- case DebugReportObjectTypeEXT::eObjectTableNVX : return "ObjectTableNVX";
- case DebugReportObjectTypeEXT::eIndirectCommandsLayoutNVX : return "IndirectCommandsLayoutNVX";
case DebugReportObjectTypeEXT::eValidationCacheEXT : return "ValidationCacheEXT";
case DebugReportObjectTypeEXT::eSamplerYcbcrConversion : return "SamplerYcbcrConversion";
case DebugReportObjectTypeEXT::eDescriptorUpdateTemplate : return "DescriptorUpdateTemplate";
- case DebugReportObjectTypeEXT::eAccelerationStructureNV : return "AccelerationStructureNV";
+ case DebugReportObjectTypeEXT::eAccelerationStructureKHR : return "AccelerationStructureKHR";
default: return "invalid";
}
}
- enum class DebugUtilsMessageSeverityFlagBitsEXT
+ enum class DebugUtilsMessageSeverityFlagBitsEXT : VkDebugUtilsMessageSeverityFlagsEXT
{
eVerbose = VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT,
eInfo = VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT,
@@ -3965,7 +4731,7 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class DebugUtilsMessageTypeFlagBitsEXT
+ enum class DebugUtilsMessageTypeFlagBitsEXT : VkDebugUtilsMessageTypeFlagsEXT
{
eGeneral = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT,
eValidation = VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT,
@@ -3983,13 +4749,13 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class DependencyFlagBits
+ enum class DependencyFlagBits : VkDependencyFlags
{
eByRegion = VK_DEPENDENCY_BY_REGION_BIT,
eDeviceGroup = VK_DEPENDENCY_DEVICE_GROUP_BIT,
eViewLocal = VK_DEPENDENCY_VIEW_LOCAL_BIT,
- eViewLocalKHR = VK_DEPENDENCY_VIEW_LOCAL_BIT_KHR,
- eDeviceGroupKHR = VK_DEPENDENCY_DEVICE_GROUP_BIT_KHR
+ eDeviceGroupKHR = VK_DEPENDENCY_DEVICE_GROUP_BIT_KHR,
+ eViewLocalKHR = VK_DEPENDENCY_VIEW_LOCAL_BIT_KHR
};
VULKAN_HPP_INLINE std::string to_string( DependencyFlagBits value )
@@ -4003,7 +4769,7 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class DescriptorBindingFlagBits
+ enum class DescriptorBindingFlagBits : VkDescriptorBindingFlags
{
eUpdateAfterBind = VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT,
eUpdateUnusedWhilePending = VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT,
@@ -4024,7 +4790,7 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class DescriptorPoolCreateFlagBits
+ enum class DescriptorPoolCreateFlagBits : VkDescriptorPoolCreateFlags
{
eFreeDescriptorSet = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
eUpdateAfterBind = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT,
@@ -4041,7 +4807,7 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class DescriptorSetLayoutCreateFlagBits
+ enum class DescriptorSetLayoutCreateFlagBits : VkDescriptorSetLayoutCreateFlags
{
eUpdateAfterBindPool = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT,
ePushDescriptorKHR = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR,
@@ -4072,6 +4838,7 @@ namespace VULKAN_HPP_NAMESPACE
eStorageBufferDynamic = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC,
eInputAttachment = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT,
eInlineUniformBlockEXT = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT,
+ eAccelerationStructureKHR = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR,
eAccelerationStructureNV = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV
};
@@ -4091,7 +4858,7 @@ namespace VULKAN_HPP_NAMESPACE
case DescriptorType::eStorageBufferDynamic : return "StorageBufferDynamic";
case DescriptorType::eInputAttachment : return "InputAttachment";
case DescriptorType::eInlineUniformBlockEXT : return "InlineUniformBlockEXT";
- case DescriptorType::eAccelerationStructureNV : return "AccelerationStructureNV";
+ case DescriptorType::eAccelerationStructureKHR : return "AccelerationStructureKHR";
default: return "invalid";
}
}
@@ -4121,6 +4888,24 @@ namespace VULKAN_HPP_NAMESPACE
return "(void)";
}
+ enum class DeviceDiagnosticsConfigFlagBitsNV : VkDeviceDiagnosticsConfigFlagsNV
+ {
+ eEnableShaderDebugInfo = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_DEBUG_INFO_BIT_NV,
+ eEnableResourceTracking = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_RESOURCE_TRACKING_BIT_NV,
+ eEnableAutomaticCheckpoints = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_AUTOMATIC_CHECKPOINTS_BIT_NV
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( DeviceDiagnosticsConfigFlagBitsNV value )
+ {
+ switch ( value )
+ {
+ case DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo : return "EnableShaderDebugInfo";
+ case DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking : return "EnableResourceTracking";
+ case DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints : return "EnableAutomaticCheckpoints";
+ default: return "invalid";
+ }
+ }
+
enum class DeviceEventTypeEXT
{
eDisplayHotplug = VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT
@@ -4135,7 +4920,7 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class DeviceGroupPresentModeFlagBitsKHR
+ enum class DeviceGroupPresentModeFlagBitsKHR : VkDeviceGroupPresentModeFlagsKHR
{
eLocal = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR,
eRemote = VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR,
@@ -4155,7 +4940,7 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class DeviceQueueCreateFlagBits
+ enum class DeviceQueueCreateFlagBits : VkDeviceQueueCreateFlags
{
eProtected = VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT
};
@@ -4199,7 +4984,7 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class DisplayPlaneAlphaFlagBitsKHR
+ enum class DisplayPlaneAlphaFlagBitsKHR : VkDisplayPlaneAlphaFlagsKHR
{
eOpaque = VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR,
eGlobal = VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR,
@@ -4251,6 +5036,8 @@ namespace VULKAN_HPP_NAMESPACE
eGoogleSwiftshader = VK_DRIVER_ID_GOOGLE_SWIFTSHADER,
eGgpProprietary = VK_DRIVER_ID_GGP_PROPRIETARY,
eBroadcomProprietary = VK_DRIVER_ID_BROADCOM_PROPRIETARY,
+ eMesaLlvmpipe = VK_DRIVER_ID_MESA_LLVMPIPE,
+ eMoltenvk = VK_DRIVER_ID_MOLTENVK,
eIntelOpenSourceMesa = VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA_KHR
};
using DriverIdKHR = DriverId;
@@ -4271,6 +5058,8 @@ namespace VULKAN_HPP_NAMESPACE
case DriverId::eGoogleSwiftshader : return "GoogleSwiftshader";
case DriverId::eGgpProprietary : return "GgpProprietary";
case DriverId::eBroadcomProprietary : return "BroadcomProprietary";
+ case DriverId::eMesaLlvmpipe : return "MesaLlvmpipe";
+ case DriverId::eMoltenvk : return "Moltenvk";
default: return "invalid";
}
}
@@ -4292,7 +5081,19 @@ namespace VULKAN_HPP_NAMESPACE
eViewportShadingRatePaletteNV = VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV,
eViewportCoarseSampleOrderNV = VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV,
eExclusiveScissorNV = VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV,
- eLineStippleEXT = VK_DYNAMIC_STATE_LINE_STIPPLE_EXT
+ eLineStippleEXT = VK_DYNAMIC_STATE_LINE_STIPPLE_EXT,
+ eCullModeEXT = VK_DYNAMIC_STATE_CULL_MODE_EXT,
+ eFrontFaceEXT = VK_DYNAMIC_STATE_FRONT_FACE_EXT,
+ ePrimitiveTopologyEXT = VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT,
+ eViewportWithCountEXT = VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT,
+ eScissorWithCountEXT = VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT,
+ eVertexInputBindingStrideEXT = VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT,
+ eDepthTestEnableEXT = VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT,
+ eDepthWriteEnableEXT = VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT,
+ eDepthCompareOpEXT = VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT,
+ eDepthBoundsTestEnableEXT = VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT,
+ eStencilTestEnableEXT = VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT,
+ eStencilOpEXT = VK_DYNAMIC_STATE_STENCIL_OP_EXT
};
VULKAN_HPP_INLINE std::string to_string( DynamicState value )
@@ -4315,11 +5116,23 @@ namespace VULKAN_HPP_NAMESPACE
case DynamicState::eViewportCoarseSampleOrderNV : return "ViewportCoarseSampleOrderNV";
case DynamicState::eExclusiveScissorNV : return "ExclusiveScissorNV";
case DynamicState::eLineStippleEXT : return "LineStippleEXT";
+ case DynamicState::eCullModeEXT : return "CullModeEXT";
+ case DynamicState::eFrontFaceEXT : return "FrontFaceEXT";
+ case DynamicState::ePrimitiveTopologyEXT : return "PrimitiveTopologyEXT";
+ case DynamicState::eViewportWithCountEXT : return "ViewportWithCountEXT";
+ case DynamicState::eScissorWithCountEXT : return "ScissorWithCountEXT";
+ case DynamicState::eVertexInputBindingStrideEXT : return "VertexInputBindingStrideEXT";
+ case DynamicState::eDepthTestEnableEXT : return "DepthTestEnableEXT";
+ case DynamicState::eDepthWriteEnableEXT : return "DepthWriteEnableEXT";
+ case DynamicState::eDepthCompareOpEXT : return "DepthCompareOpEXT";
+ case DynamicState::eDepthBoundsTestEnableEXT : return "DepthBoundsTestEnableEXT";
+ case DynamicState::eStencilTestEnableEXT : return "StencilTestEnableEXT";
+ case DynamicState::eStencilOpEXT : return "StencilOpEXT";
default: return "invalid";
}
}
- enum class ExternalFenceFeatureFlagBits
+ enum class ExternalFenceFeatureFlagBits : VkExternalFenceFeatureFlags
{
eExportable = VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT,
eImportable = VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT
@@ -4336,7 +5149,7 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class ExternalFenceHandleTypeFlagBits
+ enum class ExternalFenceHandleTypeFlagBits : VkExternalFenceHandleTypeFlags
{
eOpaqueFd = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT,
eOpaqueWin32 = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT,
@@ -4357,7 +5170,7 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class ExternalMemoryFeatureFlagBits
+ enum class ExternalMemoryFeatureFlagBits : VkExternalMemoryFeatureFlags
{
eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT,
eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT,
@@ -4376,7 +5189,7 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class ExternalMemoryFeatureFlagBitsNV
+ enum class ExternalMemoryFeatureFlagBitsNV : VkExternalMemoryFeatureFlagsNV
{
eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV,
eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV,
@@ -4394,7 +5207,7 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class ExternalMemoryHandleTypeFlagBits
+ enum class ExternalMemoryHandleTypeFlagBits : VkExternalMemoryHandleTypeFlags
{
eOpaqueFd = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT,
eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT,
@@ -4429,7 +5242,7 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class ExternalMemoryHandleTypeFlagBitsNV
+ enum class ExternalMemoryHandleTypeFlagBitsNV : VkExternalMemoryHandleTypeFlagsNV
{
eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV,
eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV,
@@ -4449,7 +5262,7 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class ExternalSemaphoreFeatureFlagBits
+ enum class ExternalSemaphoreFeatureFlagBits : VkExternalSemaphoreFeatureFlags
{
eExportable = VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT,
eImportable = VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT
@@ -4466,13 +5279,14 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class ExternalSemaphoreHandleTypeFlagBits
+ enum class ExternalSemaphoreHandleTypeFlagBits : VkExternalSemaphoreHandleTypeFlags
{
eOpaqueFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT,
eOpaqueWin32 = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT,
eOpaqueWin32Kmt = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
eD3D12Fence = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT,
- eSyncFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT
+ eSyncFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT,
+ eD3D11Fence = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D11_FENCE_BIT
};
using ExternalSemaphoreHandleTypeFlagBitsKHR = ExternalSemaphoreHandleTypeFlagBits;
@@ -4489,7 +5303,7 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class FenceCreateFlagBits
+ enum class FenceCreateFlagBits : VkFenceCreateFlags
{
eSignaled = VK_FENCE_CREATE_SIGNALED_BIT
};
@@ -4503,7 +5317,7 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class FenceImportFlagBits
+ enum class FenceImportFlagBits : VkFenceImportFlags
{
eTemporary = VK_FENCE_IMPORT_TEMPORARY_BIT
};
@@ -4780,40 +5594,42 @@ namespace VULKAN_HPP_NAMESPACE
eAstc10x10SfloatBlockEXT = VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK_EXT,
eAstc12x10SfloatBlockEXT = VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK_EXT,
eAstc12x12SfloatBlockEXT = VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK_EXT,
- eG8B8G8R8422UnormKHR = VK_FORMAT_G8B8G8R8_422_UNORM_KHR,
+ eA4R4G4B4UnormPack16EXT = VK_FORMAT_A4R4G4B4_UNORM_PACK16_EXT,
+ eA4B4G4R4UnormPack16EXT = VK_FORMAT_A4B4G4R4_UNORM_PACK16_EXT,
+ eB10X6G10X6R10X6G10X6422Unorm4Pack16KHR = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16_KHR,
+ eB12X4G12X4R12X4G12X4422Unorm4Pack16KHR = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16_KHR,
+ eB16G16R16G16422UnormKHR = VK_FORMAT_B16G16R16G16_422_UNORM_KHR,
eB8G8R8G8422UnormKHR = VK_FORMAT_B8G8R8G8_422_UNORM_KHR,
- eG8B8R83Plane420UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR,
- eG8B8R82Plane420UnormKHR = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR,
- eG8B8R83Plane422UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR,
- eG8B8R82Plane422UnormKHR = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM_KHR,
- eG8B8R83Plane444UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR,
- eR10X6UnormPack16KHR = VK_FORMAT_R10X6_UNORM_PACK16_KHR,
- eR10X6G10X6Unorm2Pack16KHR = VK_FORMAT_R10X6G10X6_UNORM_2PACK16_KHR,
- eR10X6G10X6B10X6A10X6Unorm4Pack16KHR = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16_KHR,
eG10X6B10X6G10X6R10X6422Unorm4Pack16KHR = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16_KHR,
- eB10X6G10X6R10X6G10X6422Unorm4Pack16KHR = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16_KHR,
- eG10X6B10X6R10X63Plane420Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16_KHR,
eG10X6B10X6R10X62Plane420Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16_KHR,
- eG10X6B10X6R10X63Plane422Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16_KHR,
eG10X6B10X6R10X62Plane422Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16_KHR,
+ eG10X6B10X6R10X63Plane420Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16_KHR,
+ eG10X6B10X6R10X63Plane422Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16_KHR,
eG10X6B10X6R10X63Plane444Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16_KHR,
- eR12X4UnormPack16KHR = VK_FORMAT_R12X4_UNORM_PACK16_KHR,
- eR12X4G12X4Unorm2Pack16KHR = VK_FORMAT_R12X4G12X4_UNORM_2PACK16_KHR,
- eR12X4G12X4B12X4A12X4Unorm4Pack16KHR = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16_KHR,
eG12X4B12X4G12X4R12X4422Unorm4Pack16KHR = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16_KHR,
- eB12X4G12X4R12X4G12X4422Unorm4Pack16KHR = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16_KHR,
- eG12X4B12X4R12X43Plane420Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16_KHR,
eG12X4B12X4R12X42Plane420Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16_KHR,
- eG12X4B12X4R12X43Plane422Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16_KHR,
eG12X4B12X4R12X42Plane422Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16_KHR,
+ eG12X4B12X4R12X43Plane420Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16_KHR,
+ eG12X4B12X4R12X43Plane422Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16_KHR,
eG12X4B12X4R12X43Plane444Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16_KHR,
eG16B16G16R16422UnormKHR = VK_FORMAT_G16B16G16R16_422_UNORM_KHR,
- eB16G16R16G16422UnormKHR = VK_FORMAT_B16G16R16G16_422_UNORM_KHR,
- eG16B16R163Plane420UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM_KHR,
eG16B16R162Plane420UnormKHR = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM_KHR,
- eG16B16R163Plane422UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM_KHR,
eG16B16R162Plane422UnormKHR = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM_KHR,
- eG16B16R163Plane444UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM_KHR
+ eG16B16R163Plane420UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM_KHR,
+ eG16B16R163Plane422UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM_KHR,
+ eG16B16R163Plane444UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM_KHR,
+ eG8B8G8R8422UnormKHR = VK_FORMAT_G8B8G8R8_422_UNORM_KHR,
+ eG8B8R82Plane420UnormKHR = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR,
+ eG8B8R82Plane422UnormKHR = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM_KHR,
+ eG8B8R83Plane420UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR,
+ eG8B8R83Plane422UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR,
+ eG8B8R83Plane444UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR,
+ eR10X6G10X6B10X6A10X6Unorm4Pack16KHR = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16_KHR,
+ eR10X6G10X6Unorm2Pack16KHR = VK_FORMAT_R10X6G10X6_UNORM_2PACK16_KHR,
+ eR10X6UnormPack16KHR = VK_FORMAT_R10X6_UNORM_PACK16_KHR,
+ eR12X4G12X4B12X4A12X4Unorm4Pack16KHR = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16_KHR,
+ eR12X4G12X4Unorm2Pack16KHR = VK_FORMAT_R12X4G12X4_UNORM_2PACK16_KHR,
+ eR12X4UnormPack16KHR = VK_FORMAT_R12X4_UNORM_PACK16_KHR
};
VULKAN_HPP_INLINE std::string to_string( Format value )
@@ -5061,11 +5877,13 @@ namespace VULKAN_HPP_NAMESPACE
case Format::eAstc10x10SfloatBlockEXT : return "Astc10x10SfloatBlockEXT";
case Format::eAstc12x10SfloatBlockEXT : return "Astc12x10SfloatBlockEXT";
case Format::eAstc12x12SfloatBlockEXT : return "Astc12x12SfloatBlockEXT";
+ case Format::eA4R4G4B4UnormPack16EXT : return "A4R4G4B4UnormPack16EXT";
+ case Format::eA4B4G4R4UnormPack16EXT : return "A4B4G4R4UnormPack16EXT";
default: return "invalid";
}
}
- enum class FormatFeatureFlagBits
+ enum class FormatFeatureFlagBits : VkFormatFeatureFlags
{
eSampledImage = VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT,
eStorageImage = VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT,
@@ -5091,18 +5909,19 @@ namespace VULKAN_HPP_NAMESPACE
eCositedChromaSamples = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT,
eSampledImageFilterMinmax = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT,
eSampledImageFilterCubicIMG = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG,
+ eAccelerationStructureVertexBufferKHR = VK_FORMAT_FEATURE_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR,
eFragmentDensityMapEXT = VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT,
- eTransferSrcKHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR,
- eTransferDstKHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR,
- eSampledImageFilterMinmaxEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT,
+ eCositedChromaSamplesKHR = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR,
+ eDisjointKHR = VK_FORMAT_FEATURE_DISJOINT_BIT_KHR,
eMidpointChromaSamplesKHR = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT_KHR,
- eSampledImageYcbcrConversionLinearFilterKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR,
- eSampledImageYcbcrConversionSeparateReconstructionFilterKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR,
+ eSampledImageFilterCubicEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT,
+ eSampledImageFilterMinmaxEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT,
eSampledImageYcbcrConversionChromaReconstructionExplicitKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR,
eSampledImageYcbcrConversionChromaReconstructionExplicitForceableKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR,
- eDisjointKHR = VK_FORMAT_FEATURE_DISJOINT_BIT_KHR,
- eCositedChromaSamplesKHR = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR,
- eSampledImageFilterCubicEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT
+ eSampledImageYcbcrConversionLinearFilterKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR,
+ eSampledImageYcbcrConversionSeparateReconstructionFilterKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR,
+ eTransferDstKHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR,
+ eTransferSrcKHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR
};
VULKAN_HPP_INLINE std::string to_string( FormatFeatureFlagBits value )
@@ -5133,12 +5952,13 @@ namespace VULKAN_HPP_NAMESPACE
case FormatFeatureFlagBits::eCositedChromaSamples : return "CositedChromaSamples";
case FormatFeatureFlagBits::eSampledImageFilterMinmax : return "SampledImageFilterMinmax";
case FormatFeatureFlagBits::eSampledImageFilterCubicIMG : return "SampledImageFilterCubicIMG";
+ case FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR : return "AccelerationStructureVertexBufferKHR";
case FormatFeatureFlagBits::eFragmentDensityMapEXT : return "FragmentDensityMapEXT";
default: return "invalid";
}
}
- enum class FramebufferCreateFlagBits
+ enum class FramebufferCreateFlagBits : VkFramebufferCreateFlags
{
eImageless = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT,
eImagelessKHR = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR
@@ -5191,59 +6011,65 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- enum class GeometryFlagBitsNV
+ enum class GeometryFlagBitsKHR : VkGeometryFlagsKHR
{
- eOpaque = VK_GEOMETRY_OPAQUE_BIT_NV,
- eNoDuplicateAnyHitInvocation = VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_NV
+ eOpaque = VK_GEOMETRY_OPAQUE_BIT_KHR,
+ eNoDuplicateAnyHitInvocation = VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR
};
+ using GeometryFlagBitsNV = GeometryFlagBitsKHR;
- VULKAN_HPP_INLINE std::string to_string( GeometryFlagBitsNV value )
+ VULKAN_HPP_INLINE std::string to_string( GeometryFlagBitsKHR value )
{
switch ( value )
{
- case GeometryFlagBitsNV::eOpaque : return "Opaque";
- case GeometryFlagBitsNV::eNoDuplicateAnyHitInvocation : return "NoDuplicateAnyHitInvocation";
+ case GeometryFlagBitsKHR::eOpaque : return "Opaque";
+ case GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation : return "NoDuplicateAnyHitInvocation";
default: return "invalid";
}
}
- enum class GeometryInstanceFlagBitsNV
+ enum class GeometryInstanceFlagBitsKHR : VkGeometryInstanceFlagsKHR
{
- eTriangleCullDisable = VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV,
- eTriangleFrontCounterclockwise = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_NV,
- eForceOpaque = VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_NV,
- eForceNoOpaque = VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_NV
+ eTriangleFacingCullDisable = VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR,
+ eTriangleFrontCounterclockwise = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR,
+ eForceOpaque = VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR,
+ eForceNoOpaque = VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR,
+ eTriangleCullDisable = VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV
};
+ using GeometryInstanceFlagBitsNV = GeometryInstanceFlagBitsKHR;
- VULKAN_HPP_INLINE std::string to_string( GeometryInstanceFlagBitsNV value )
+ VULKAN_HPP_INLINE std::string to_string( GeometryInstanceFlagBitsKHR value )
{
switch ( value )
{
- case GeometryInstanceFlagBitsNV::eTriangleCullDisable : return "TriangleCullDisable";
- case GeometryInstanceFlagBitsNV::eTriangleFrontCounterclockwise : return "TriangleFrontCounterclockwise";
- case GeometryInstanceFlagBitsNV::eForceOpaque : return "ForceOpaque";
- case GeometryInstanceFlagBitsNV::eForceNoOpaque : return "ForceNoOpaque";
+ case GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable : return "TriangleFacingCullDisable";
+ case GeometryInstanceFlagBitsKHR::eTriangleFrontCounterclockwise : return "TriangleFrontCounterclockwise";
+ case GeometryInstanceFlagBitsKHR::eForceOpaque : return "ForceOpaque";
+ case GeometryInstanceFlagBitsKHR::eForceNoOpaque : return "ForceNoOpaque";
default: return "invalid";
}
}
- enum class GeometryTypeNV
+ enum class GeometryTypeKHR
{
- eTriangles = VK_GEOMETRY_TYPE_TRIANGLES_NV,
- eAabbs = VK_GEOMETRY_TYPE_AABBS_NV
+ eTriangles = VK_GEOMETRY_TYPE_TRIANGLES_KHR,
+ eAabbs = VK_GEOMETRY_TYPE_AABBS_KHR,
+ eInstances = VK_GEOMETRY_TYPE_INSTANCES_KHR
};
+ using GeometryTypeNV = GeometryTypeKHR;
- VULKAN_HPP_INLINE std::string to_string( GeometryTypeNV value )
+ VULKAN_HPP_INLINE std::string to_string( GeometryTypeKHR value )
{
switch ( value )
{
- case GeometryTypeNV::eTriangles : return "Triangles";
- case GeometryTypeNV::eAabbs : return "Aabbs";
+ case GeometryTypeKHR::eTriangles : return "Triangles";
+ case GeometryTypeKHR::eAabbs : return "Aabbs";
+ case GeometryTypeKHR::eInstances : return "Instances";
default: return "invalid";
}
}
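
Editorial aside (not part of the upstream header or this diff): the pattern above, where the NV ray-tracing enums become aliases of the new KHR enums, keeps existing code that uses the NV spellings compiling unchanged, since both names now refer to the same type and enumerators. A small self-contained illustration:

#include <type_traits>
#include <vulkan/vulkan.hpp>

// The NV name is a type alias of the KHR enum, so the two spellings are interchangeable.
static_assert( std::is_same<vk::GeometryFlagBitsNV, vk::GeometryFlagBitsKHR>::value,
               "GeometryFlagBitsNV aliases GeometryFlagBitsKHR" );

// Bits written with either spelling combine into the same flags type.
constexpr vk::GeometryFlagsKHR geometryFlags =
    vk::GeometryFlagBitsNV::eOpaque | vk::GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation;
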
- enum class ImageAspectFlagBits
+ enum class ImageAspectFlagBits : VkImageAspectFlags
{
eColor = VK_IMAGE_ASPECT_COLOR_BIT,
eDepth = VK_IMAGE_ASPECT_DEPTH_BIT,
@@ -5280,7 +6106,7 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class ImageCreateFlagBits
+ enum class ImageCreateFlagBits : VkImageCreateFlags
{
eSparseBinding = VK_IMAGE_CREATE_SPARSE_BINDING_BIT,
eSparseResidency = VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT,
@@ -5297,12 +6123,12 @@ namespace VULKAN_HPP_NAMESPACE
eCornerSampledNV = VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV,
eSampleLocationsCompatibleDepthEXT = VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT,
eSubsampledEXT = VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT,
- eSplitInstanceBindRegionsKHR = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR,
e2DArrayCompatibleKHR = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR,
+ eAliasKHR = VK_IMAGE_CREATE_ALIAS_BIT_KHR,
eBlockTexelViewCompatibleKHR = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR,
- eExtendedUsageKHR = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR,
eDisjointKHR = VK_IMAGE_CREATE_DISJOINT_BIT_KHR,
- eAliasKHR = VK_IMAGE_CREATE_ALIAS_BIT_KHR
+ eExtendedUsageKHR = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR,
+ eSplitInstanceBindRegionsKHR = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR
};
VULKAN_HPP_INLINE std::string to_string( ImageCreateFlagBits value )
@@ -5349,10 +6175,10 @@ namespace VULKAN_HPP_NAMESPACE
eSharedPresentKHR = VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR,
eShadingRateOptimalNV = VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV,
eFragmentDensityMapOptimalEXT = VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT,
- eDepthReadOnlyStencilAttachmentOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR,
- eDepthAttachmentStencilReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR,
eDepthAttachmentOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR,
+ eDepthAttachmentStencilReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR,
eDepthReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR,
+ eDepthReadOnlyStencilAttachmentOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR,
eStencilAttachmentOptimalKHR = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR,
eStencilReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR
};
@@ -5420,7 +6246,7 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class ImageUsageFlagBits
+ enum class ImageUsageFlagBits : VkImageUsageFlags
{
eTransferSrc = VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
eTransferDst = VK_IMAGE_USAGE_TRANSFER_DST_BIT,
@@ -5452,9 +6278,10 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class ImageViewCreateFlagBits
+ enum class ImageViewCreateFlagBits : VkImageViewCreateFlags
{
- eFragmentDensityMapDynamicEXT = VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT
+ eFragmentDensityMapDynamicEXT = VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT,
+ eFragmentDensityMapDeferredEXT = VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DEFERRED_BIT_EXT
};
VULKAN_HPP_INLINE std::string to_string( ImageViewCreateFlagBits value )
@@ -5462,6 +6289,7 @@ namespace VULKAN_HPP_NAMESPACE
switch ( value )
{
case ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT : return "FragmentDensityMapDynamicEXT";
+ case ImageViewCreateFlagBits::eFragmentDensityMapDeferredEXT : return "FragmentDensityMapDeferredEXT";
default: return "invalid";
}
}
@@ -5496,8 +6324,9 @@ namespace VULKAN_HPP_NAMESPACE
{
eUint16 = VK_INDEX_TYPE_UINT16,
eUint32 = VK_INDEX_TYPE_UINT32,
- eNoneNV = VK_INDEX_TYPE_NONE_NV,
- eUint8EXT = VK_INDEX_TYPE_UINT8_EXT
+ eNoneKHR = VK_INDEX_TYPE_NONE_KHR,
+ eUint8EXT = VK_INDEX_TYPE_UINT8_EXT,
+ eNoneNV = VK_INDEX_TYPE_NONE_NV
};
VULKAN_HPP_INLINE std::string to_string( IndexType value )
@@ -5506,56 +6335,68 @@ namespace VULKAN_HPP_NAMESPACE
{
case IndexType::eUint16 : return "Uint16";
case IndexType::eUint32 : return "Uint32";
- case IndexType::eNoneNV : return "NoneNV";
+ case IndexType::eNoneKHR : return "NoneKHR";
case IndexType::eUint8EXT : return "Uint8EXT";
default: return "invalid";
}
}
- enum class IndirectCommandsLayoutUsageFlagBitsNVX
+ enum class IndirectCommandsLayoutUsageFlagBitsNV : VkIndirectCommandsLayoutUsageFlagsNV
{
- eUnorderedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NVX,
- eSparseSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_SPARSE_SEQUENCES_BIT_NVX,
- eEmptyExecutions = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EMPTY_EXECUTIONS_BIT_NVX,
- eIndexedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NVX
+ eExplicitPreprocess = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EXPLICIT_PREPROCESS_BIT_NV,
+ eIndexedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NV,
+ eUnorderedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NV
};
- VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagBitsNVX value )
+ VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagBitsNV value )
{
switch ( value )
{
- case IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences : return "UnorderedSequences";
- case IndirectCommandsLayoutUsageFlagBitsNVX::eSparseSequences : return "SparseSequences";
- case IndirectCommandsLayoutUsageFlagBitsNVX::eEmptyExecutions : return "EmptyExecutions";
- case IndirectCommandsLayoutUsageFlagBitsNVX::eIndexedSequences : return "IndexedSequences";
+ case IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess : return "ExplicitPreprocess";
+ case IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences : return "IndexedSequences";
+ case IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences : return "UnorderedSequences";
default: return "invalid";
}
}
- enum class IndirectCommandsTokenTypeNVX
+ enum class IndirectCommandsTokenTypeNV
{
- ePipeline = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX,
- eDescriptorSet = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DESCRIPTOR_SET_NVX,
- eIndexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NVX,
- eVertexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NVX,
- ePushConstant = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NVX,
- eDrawIndexed = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NVX,
- eDraw = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NVX,
- eDispatch = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX
+ eShaderGroup = VK_INDIRECT_COMMANDS_TOKEN_TYPE_SHADER_GROUP_NV,
+ eStateFlags = VK_INDIRECT_COMMANDS_TOKEN_TYPE_STATE_FLAGS_NV,
+ eIndexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NV,
+ eVertexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NV,
+ ePushConstant = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NV,
+ eDrawIndexed = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NV,
+ eDraw = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV,
+ eDrawTasks = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV
};
- VULKAN_HPP_INLINE std::string to_string( IndirectCommandsTokenTypeNVX value )
+ VULKAN_HPP_INLINE std::string to_string( IndirectCommandsTokenTypeNV value )
{
switch ( value )
{
- case IndirectCommandsTokenTypeNVX::ePipeline : return "Pipeline";
- case IndirectCommandsTokenTypeNVX::eDescriptorSet : return "DescriptorSet";
- case IndirectCommandsTokenTypeNVX::eIndexBuffer : return "IndexBuffer";
- case IndirectCommandsTokenTypeNVX::eVertexBuffer : return "VertexBuffer";
- case IndirectCommandsTokenTypeNVX::ePushConstant : return "PushConstant";
- case IndirectCommandsTokenTypeNVX::eDrawIndexed : return "DrawIndexed";
- case IndirectCommandsTokenTypeNVX::eDraw : return "Draw";
- case IndirectCommandsTokenTypeNVX::eDispatch : return "Dispatch";
+ case IndirectCommandsTokenTypeNV::eShaderGroup : return "ShaderGroup";
+ case IndirectCommandsTokenTypeNV::eStateFlags : return "StateFlags";
+ case IndirectCommandsTokenTypeNV::eIndexBuffer : return "IndexBuffer";
+ case IndirectCommandsTokenTypeNV::eVertexBuffer : return "VertexBuffer";
+ case IndirectCommandsTokenTypeNV::ePushConstant : return "PushConstant";
+ case IndirectCommandsTokenTypeNV::eDrawIndexed : return "DrawIndexed";
+ case IndirectCommandsTokenTypeNV::eDraw : return "Draw";
+ case IndirectCommandsTokenTypeNV::eDrawTasks : return "DrawTasks";
+ default: return "invalid";
+ }
+ }
+
+ enum class IndirectStateFlagBitsNV : VkIndirectStateFlagsNV
+ {
+ eFlagFrontface = VK_INDIRECT_STATE_FLAG_FRONTFACE_BIT_NV
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( IndirectStateFlagBitsNV value )
+ {
+ switch ( value )
+ {
+ case IndirectStateFlagBitsNV::eFlagFrontface : return "FlagFrontface";
default: return "invalid";
}
}
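
Editorial aside (not part of the upstream header or this diff): these hunks replace the experimental NVX device-generated-commands enums with the VK_NV_device_generated_commands versions; the token set drops eDispatch and gains shader-group, state-flags and mesh-task tokens, and IndirectStateFlagBitsNV is new. Purely to illustrate the new spellings:

#include <vulkan/vulkan.hpp>

// Tokens and state flags now use the NV (not NVX) names.
constexpr vk::IndirectCommandsTokenTypeNV drawToken = vk::IndirectCommandsTokenTypeNV::eDrawIndexed;
constexpr vk::IndirectStateFlagsNV        frontFace = vk::IndirectStateFlagBitsNV::eFlagFrontface;
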
@@ -5646,7 +6487,7 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class MemoryAllocateFlagBits
+ enum class MemoryAllocateFlagBits : VkMemoryAllocateFlags
{
eDeviceMask = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT,
eDeviceAddress = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT,
@@ -5665,7 +6506,7 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class MemoryHeapFlagBits
+ enum class MemoryHeapFlagBits : VkMemoryHeapFlags
{
eDeviceLocal = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT,
eMultiInstance = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT,
@@ -5700,7 +6541,7 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class MemoryPropertyFlagBits
+ enum class MemoryPropertyFlagBits : VkMemoryPropertyFlags
{
eDeviceLocal = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
eHostVisible = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT,
@@ -5728,44 +6569,6 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class ObjectEntryTypeNVX
- {
- eDescriptorSet = VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX,
- ePipeline = VK_OBJECT_ENTRY_TYPE_PIPELINE_NVX,
- eIndexBuffer = VK_OBJECT_ENTRY_TYPE_INDEX_BUFFER_NVX,
- eVertexBuffer = VK_OBJECT_ENTRY_TYPE_VERTEX_BUFFER_NVX,
- ePushConstant = VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX
- };
-
- VULKAN_HPP_INLINE std::string to_string( ObjectEntryTypeNVX value )
- {
- switch ( value )
- {
- case ObjectEntryTypeNVX::eDescriptorSet : return "DescriptorSet";
- case ObjectEntryTypeNVX::ePipeline : return "Pipeline";
- case ObjectEntryTypeNVX::eIndexBuffer : return "IndexBuffer";
- case ObjectEntryTypeNVX::eVertexBuffer : return "VertexBuffer";
- case ObjectEntryTypeNVX::ePushConstant : return "PushConstant";
- default: return "invalid";
- }
- }
-
- enum class ObjectEntryUsageFlagBitsNVX
- {
- eGraphics = VK_OBJECT_ENTRY_USAGE_GRAPHICS_BIT_NVX,
- eCompute = VK_OBJECT_ENTRY_USAGE_COMPUTE_BIT_NVX
- };
-
- VULKAN_HPP_INLINE std::string to_string( ObjectEntryUsageFlagBitsNVX value )
- {
- switch ( value )
- {
- case ObjectEntryUsageFlagBitsNVX::eGraphics : return "Graphics";
- case ObjectEntryUsageFlagBitsNVX::eCompute : return "Compute";
- default: return "invalid";
- }
- }
-
enum class ObjectType
{
eUnknown = VK_OBJECT_TYPE_UNKNOWN,
@@ -5801,12 +6604,14 @@ namespace VULKAN_HPP_NAMESPACE
eDisplayKHR = VK_OBJECT_TYPE_DISPLAY_KHR,
eDisplayModeKHR = VK_OBJECT_TYPE_DISPLAY_MODE_KHR,
eDebugReportCallbackEXT = VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT,
- eObjectTableNVX = VK_OBJECT_TYPE_OBJECT_TABLE_NVX,
- eIndirectCommandsLayoutNVX = VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX,
eDebugUtilsMessengerEXT = VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT,
+ eAccelerationStructureKHR = VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR,
eValidationCacheEXT = VK_OBJECT_TYPE_VALIDATION_CACHE_EXT,
- eAccelerationStructureNV = VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV,
ePerformanceConfigurationINTEL = VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL,
+ eDeferredOperationKHR = VK_OBJECT_TYPE_DEFERRED_OPERATION_KHR,
+ eIndirectCommandsLayoutNV = VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NV,
+ ePrivateDataSlotEXT = VK_OBJECT_TYPE_PRIVATE_DATA_SLOT_EXT,
+ eAccelerationStructureNV = VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV,
eDescriptorUpdateTemplateKHR = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR,
eSamplerYcbcrConversionKHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR
};
@@ -5848,17 +6653,22 @@ namespace VULKAN_HPP_NAMESPACE
case ObjectType::eDisplayKHR : return "DisplayKHR";
case ObjectType::eDisplayModeKHR : return "DisplayModeKHR";
case ObjectType::eDebugReportCallbackEXT : return "DebugReportCallbackEXT";
- case ObjectType::eObjectTableNVX : return "ObjectTableNVX";
- case ObjectType::eIndirectCommandsLayoutNVX : return "IndirectCommandsLayoutNVX";
case ObjectType::eDebugUtilsMessengerEXT : return "DebugUtilsMessengerEXT";
+ case ObjectType::eAccelerationStructureKHR : return "AccelerationStructureKHR";
case ObjectType::eValidationCacheEXT : return "ValidationCacheEXT";
- case ObjectType::eAccelerationStructureNV : return "AccelerationStructureNV";
case ObjectType::ePerformanceConfigurationINTEL : return "PerformanceConfigurationINTEL";
+ case ObjectType::eDeferredOperationKHR : return "DeferredOperationKHR";
+ case ObjectType::eIndirectCommandsLayoutNV : return "IndirectCommandsLayoutNV";
+ case ObjectType::ePrivateDataSlotEXT : return "PrivateDataSlotEXT";
default: return "invalid";
}
}
- enum class PeerMemoryFeatureFlagBits
+ template<ObjectType value>
+ struct cpp_type
+ {};
+
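
Editorial aside (not part of the upstream header or this diff): cpp_type is a new trait template that the header specializes per handle elsewhere in this file, so that an ObjectType enumerant can be mapped back to the matching C++ handle type at compile time. The mock below only illustrates the idea; the member and specialization names are hypothetical, not quoted from vulkan.hpp:

#include <type_traits>
#include <vulkan/vulkan.hpp>

namespace demo
{
  // Stand-alone mock of the trait: a primary template plus one per-handle specialization.
  template <vk::ObjectType value>
  struct cpp_type
  {};

  template <>
  struct cpp_type<vk::ObjectType::eBuffer>
  {
    using type = vk::Buffer;
  };
}

// Generic code can then recover the handle type from the ObjectType value.
static_assert( std::is_same<demo::cpp_type<vk::ObjectType::eBuffer>::type, vk::Buffer>::value,
               "ObjectType::eBuffer maps to vk::Buffer" );
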
+ enum class PeerMemoryFeatureFlagBits : VkPeerMemoryFeatureFlags
{
eCopySrc = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT,
eCopyDst = VK_PEER_MEMORY_FEATURE_COPY_DST_BIT,
@@ -5893,7 +6703,7 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class PerformanceCounterDescriptionFlagBitsKHR
+ enum class PerformanceCounterDescriptionFlagBitsKHR : VkPerformanceCounterDescriptionFlagsKHR
{
ePerformanceImpacting = VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_KHR,
eConcurrentlyImpacted = VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_KHR
@@ -5915,8 +6725,8 @@ namespace VULKAN_HPP_NAMESPACE
eRenderPass = VK_PERFORMANCE_COUNTER_SCOPE_RENDER_PASS_KHR,
eCommand = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR,
eVkQueryScopeCommandBuffer = VK_QUERY_SCOPE_COMMAND_BUFFER_KHR,
- eVkQueryScopeRenderPass = VK_QUERY_SCOPE_RENDER_PASS_KHR,
- eVkQueryScopeCommand = VK_QUERY_SCOPE_COMMAND_KHR
+ eVkQueryScopeCommand = VK_QUERY_SCOPE_COMMAND_KHR,
+ eVkQueryScopeRenderPass = VK_QUERY_SCOPE_RENDER_PASS_KHR
};
VULKAN_HPP_INLINE std::string to_string( PerformanceCounterScopeKHR value )
@@ -6068,6 +6878,7 @@ namespace VULKAN_HPP_NAMESPACE
{
eGraphics = VK_PIPELINE_BIND_POINT_GRAPHICS,
eCompute = VK_PIPELINE_BIND_POINT_COMPUTE,
+ eRayTracingKHR = VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR,
eRayTracingNV = VK_PIPELINE_BIND_POINT_RAY_TRACING_NV
};
@@ -6077,17 +6888,23 @@ namespace VULKAN_HPP_NAMESPACE
{
case PipelineBindPoint::eGraphics : return "Graphics";
case PipelineBindPoint::eCompute : return "Compute";
- case PipelineBindPoint::eRayTracingNV : return "RayTracingNV";
+ case PipelineBindPoint::eRayTracingKHR : return "RayTracingKHR";
default: return "invalid";
}
}
- enum class PipelineCacheCreateFlagBits
- {};
+ enum class PipelineCacheCreateFlagBits : VkPipelineCacheCreateFlags
+ {
+ eExternallySynchronizedEXT = VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT_EXT
+ };
- VULKAN_HPP_INLINE std::string to_string( PipelineCacheCreateFlagBits )
+ VULKAN_HPP_INLINE std::string to_string( PipelineCacheCreateFlagBits value )
{
- return "(void)";
+ switch ( value )
+ {
+ case PipelineCacheCreateFlagBits::eExternallySynchronizedEXT : return "ExternallySynchronizedEXT";
+ default: return "invalid";
+ }
}
enum class PipelineCacheHeaderVersion
@@ -6104,15 +6921,7 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class PipelineColorBlendStateCreateFlagBits
- {};
-
- VULKAN_HPP_INLINE std::string to_string( PipelineColorBlendStateCreateFlagBits )
- {
- return "(void)";
- }
-
- enum class PipelineCompilerControlFlagBitsAMD
+ enum class PipelineCompilerControlFlagBitsAMD : VkPipelineCompilerControlFlagsAMD
{};
VULKAN_HPP_INLINE std::string to_string( PipelineCompilerControlFlagBitsAMD )
@@ -6120,18 +6929,28 @@ namespace VULKAN_HPP_NAMESPACE
return "(void)";
}
- enum class PipelineCreateFlagBits
+ enum class PipelineCreateFlagBits : VkPipelineCreateFlags
{
eDisableOptimization = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT,
eAllowDerivatives = VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT,
eDerivative = VK_PIPELINE_CREATE_DERIVATIVE_BIT,
eViewIndexFromDeviceIndex = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT,
eDispatchBase = VK_PIPELINE_CREATE_DISPATCH_BASE_BIT,
+ eRayTracingNoNullAnyHitShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR,
+ eRayTracingNoNullClosestHitShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR,
+ eRayTracingNoNullMissShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR,
+ eRayTracingNoNullIntersectionShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR,
+ eRayTracingSkipTrianglesKHR = VK_PIPELINE_CREATE_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR,
+ eRayTracingSkipAabbsKHR = VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR,
eDeferCompileNV = VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV,
eCaptureStatisticsKHR = VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR,
eCaptureInternalRepresentationsKHR = VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR,
- eViewIndexFromDeviceIndexKHR = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR,
- eDispatchBaseKHR = VK_PIPELINE_CREATE_DISPATCH_BASE_KHR
+ eIndirectBindableNV = VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV,
+ eLibraryKHR = VK_PIPELINE_CREATE_LIBRARY_BIT_KHR,
+ eFailOnPipelineCompileRequiredEXT = VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT,
+ eEarlyReturnOnFailureEXT = VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT,
+ eDispatchBaseKHR = VK_PIPELINE_CREATE_DISPATCH_BASE_KHR,
+ eViewIndexFromDeviceIndexKHR = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR
};
VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlagBits value )
@@ -6143,14 +6962,24 @@ namespace VULKAN_HPP_NAMESPACE
case PipelineCreateFlagBits::eDerivative : return "Derivative";
case PipelineCreateFlagBits::eViewIndexFromDeviceIndex : return "ViewIndexFromDeviceIndex";
case PipelineCreateFlagBits::eDispatchBase : return "DispatchBase";
+ case PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR : return "RayTracingNoNullAnyHitShadersKHR";
+ case PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR : return "RayTracingNoNullClosestHitShadersKHR";
+ case PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR : return "RayTracingNoNullMissShadersKHR";
+ case PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR : return "RayTracingNoNullIntersectionShadersKHR";
+ case PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR : return "RayTracingSkipTrianglesKHR";
+ case PipelineCreateFlagBits::eRayTracingSkipAabbsKHR : return "RayTracingSkipAabbsKHR";
case PipelineCreateFlagBits::eDeferCompileNV : return "DeferCompileNV";
case PipelineCreateFlagBits::eCaptureStatisticsKHR : return "CaptureStatisticsKHR";
case PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR : return "CaptureInternalRepresentationsKHR";
+ case PipelineCreateFlagBits::eIndirectBindableNV : return "IndirectBindableNV";
+ case PipelineCreateFlagBits::eLibraryKHR : return "LibraryKHR";
+ case PipelineCreateFlagBits::eFailOnPipelineCompileRequiredEXT : return "FailOnPipelineCompileRequiredEXT";
+ case PipelineCreateFlagBits::eEarlyReturnOnFailureEXT : return "EarlyReturnOnFailureEXT";
default: return "invalid";
}
}
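
Editorial aside (not part of the upstream header or this diff): the PipelineCreateFlagBits values added above are new extension bits (KHR ray-tracing pipelines, pipeline libraries, device-generated commands, and related flags). They are ordinary bits of VkPipelineCreateFlags, so they combine with the core flags in the usual way; a small hedged example:

#include <vulkan/vulkan.hpp>

// Core and extension bits mix freely into one vk::PipelineCreateFlags value,
// which can then be passed to the relevant pipeline create-info structure.
constexpr vk::PipelineCreateFlags rayTracingPipelineFlags =
    vk::PipelineCreateFlagBits::eAllowDerivatives |
    vk::PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR |
    vk::PipelineCreateFlagBits::eRayTracingSkipAabbsKHR;
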
- enum class PipelineCreationFeedbackFlagBitsEXT
+ enum class PipelineCreationFeedbackFlagBitsEXT : VkPipelineCreationFeedbackFlagsEXT
{
eValid = VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT_EXT,
eApplicationPipelineCacheHit = VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT_EXT,
@@ -6168,22 +6997,6 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class PipelineDepthStencilStateCreateFlagBits
- {};
-
- VULKAN_HPP_INLINE std::string to_string( PipelineDepthStencilStateCreateFlagBits )
- {
- return "(void)";
- }
-
- enum class PipelineDynamicStateCreateFlagBits
- {};
-
- VULKAN_HPP_INLINE std::string to_string( PipelineDynamicStateCreateFlagBits )
- {
- return "(void)";
- }
-
enum class PipelineExecutableStatisticFormatKHR
{
eBool32 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR,
@@ -6204,39 +7017,7 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class PipelineInputAssemblyStateCreateFlagBits
- {};
-
- VULKAN_HPP_INLINE std::string to_string( PipelineInputAssemblyStateCreateFlagBits )
- {
- return "(void)";
- }
-
- enum class PipelineLayoutCreateFlagBits
- {};
-
- VULKAN_HPP_INLINE std::string to_string( PipelineLayoutCreateFlagBits )
- {
- return "(void)";
- }
-
- enum class PipelineMultisampleStateCreateFlagBits
- {};
-
- VULKAN_HPP_INLINE std::string to_string( PipelineMultisampleStateCreateFlagBits )
- {
- return "(void)";
- }
-
- enum class PipelineRasterizationStateCreateFlagBits
- {};
-
- VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateCreateFlagBits )
- {
- return "(void)";
- }
-
- enum class PipelineShaderStageCreateFlagBits
+ enum class PipelineShaderStageCreateFlagBits : VkPipelineShaderStageCreateFlags
{
eAllowVaryingSubgroupSizeEXT = VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT,
eRequireFullSubgroupsEXT = VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT
@@ -6252,7 +7033,7 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class PipelineStageFlagBits
+ enum class PipelineStageFlagBits : VkPipelineStageFlags
{
eTopOfPipe = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
eDrawIndirect = VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT,
@@ -6273,13 +7054,15 @@ namespace VULKAN_HPP_NAMESPACE
eAllCommands = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
eTransformFeedbackEXT = VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT,
eConditionalRenderingEXT = VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT,
- eCommandProcessNVX = VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX,
+ eRayTracingShaderKHR = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR,
+ eAccelerationStructureBuildKHR = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR,
eShadingRateImageNV = VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV,
- eRayTracingShaderNV = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV,
- eAccelerationStructureBuildNV = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV,
eTaskShaderNV = VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV,
eMeshShaderNV = VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV,
- eFragmentDensityProcessEXT = VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT
+ eFragmentDensityProcessEXT = VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT,
+ eCommandPreprocessNV = VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV,
+ eAccelerationStructureBuildNV = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV,
+ eRayTracingShaderNV = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV
};
VULKAN_HPP_INLINE std::string to_string( PipelineStageFlagBits value )
@@ -6305,41 +7088,17 @@ namespace VULKAN_HPP_NAMESPACE
case PipelineStageFlagBits::eAllCommands : return "AllCommands";
case PipelineStageFlagBits::eTransformFeedbackEXT : return "TransformFeedbackEXT";
case PipelineStageFlagBits::eConditionalRenderingEXT : return "ConditionalRenderingEXT";
- case PipelineStageFlagBits::eCommandProcessNVX : return "CommandProcessNVX";
+ case PipelineStageFlagBits::eRayTracingShaderKHR : return "RayTracingShaderKHR";
+ case PipelineStageFlagBits::eAccelerationStructureBuildKHR : return "AccelerationStructureBuildKHR";
case PipelineStageFlagBits::eShadingRateImageNV : return "ShadingRateImageNV";
- case PipelineStageFlagBits::eRayTracingShaderNV : return "RayTracingShaderNV";
- case PipelineStageFlagBits::eAccelerationStructureBuildNV : return "AccelerationStructureBuildNV";
case PipelineStageFlagBits::eTaskShaderNV : return "TaskShaderNV";
case PipelineStageFlagBits::eMeshShaderNV : return "MeshShaderNV";
case PipelineStageFlagBits::eFragmentDensityProcessEXT : return "FragmentDensityProcessEXT";
+ case PipelineStageFlagBits::eCommandPreprocessNV : return "CommandPreprocessNV";
default: return "invalid";
}
}
- enum class PipelineTessellationStateCreateFlagBits
- {};
-
- VULKAN_HPP_INLINE std::string to_string( PipelineTessellationStateCreateFlagBits )
- {
- return "(void)";
- }
-
- enum class PipelineVertexInputStateCreateFlagBits
- {};
-
- VULKAN_HPP_INLINE std::string to_string( PipelineVertexInputStateCreateFlagBits )
- {
- return "(void)";
- }
-
- enum class PipelineViewportStateCreateFlagBits
- {};
-
- VULKAN_HPP_INLINE std::string to_string( PipelineViewportStateCreateFlagBits )
- {
- return "(void)";
- }
-
enum class PointClippingBehavior
{
eAllClipPlanes = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES,
@@ -6435,7 +7194,15 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class QueryControlFlagBits
+ enum class PrivateDataSlotCreateFlagBitsEXT : VkPrivateDataSlotCreateFlagsEXT
+ {};
+
+ VULKAN_HPP_INLINE std::string to_string( PrivateDataSlotCreateFlagBitsEXT )
+ {
+ return "(void)";
+ }
+
+ enum class QueryControlFlagBits : VkQueryControlFlags
{
ePrecise = VK_QUERY_CONTROL_PRECISE_BIT
};
@@ -6449,7 +7216,7 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class QueryPipelineStatisticFlagBits
+ enum class QueryPipelineStatisticFlagBits : VkQueryPipelineStatisticFlags
{
eInputAssemblyVertices = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT,
eInputAssemblyPrimitives = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT,
@@ -6505,7 +7272,7 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class QueryResultFlagBits
+ enum class QueryResultFlagBits : VkQueryResultFlags
{
e64 = VK_QUERY_RESULT_64_BIT,
eWait = VK_QUERY_RESULT_WAIT_BIT,
@@ -6532,8 +7299,10 @@ namespace VULKAN_HPP_NAMESPACE
eTimestamp = VK_QUERY_TYPE_TIMESTAMP,
eTransformFeedbackStreamEXT = VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT,
ePerformanceQueryKHR = VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR,
- eAccelerationStructureCompactedSizeNV = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV,
- ePerformanceQueryINTEL = VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL
+ eAccelerationStructureCompactedSizeKHR = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR,
+ eAccelerationStructureSerializationSizeKHR = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR,
+ ePerformanceQueryINTEL = VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL,
+ eAccelerationStructureCompactedSizeNV = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV
};
VULKAN_HPP_INLINE std::string to_string( QueryType value )
@@ -6545,13 +7314,14 @@ namespace VULKAN_HPP_NAMESPACE
case QueryType::eTimestamp : return "Timestamp";
case QueryType::eTransformFeedbackStreamEXT : return "TransformFeedbackStreamEXT";
case QueryType::ePerformanceQueryKHR : return "PerformanceQueryKHR";
- case QueryType::eAccelerationStructureCompactedSizeNV : return "AccelerationStructureCompactedSizeNV";
+ case QueryType::eAccelerationStructureCompactedSizeKHR : return "AccelerationStructureCompactedSizeKHR";
+ case QueryType::eAccelerationStructureSerializationSizeKHR : return "AccelerationStructureSerializationSizeKHR";
case QueryType::ePerformanceQueryINTEL : return "PerformanceQueryINTEL";
default: return "invalid";
}
}
- enum class QueueFlagBits
+ enum class QueueFlagBits : VkQueueFlags
{
eGraphics = VK_QUEUE_GRAPHICS_BIT,
eCompute = VK_QUEUE_COMPUTE_BIT,
@@ -6609,33 +7379,40 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class RayTracingShaderGroupTypeNV
+ enum class RayTracingShaderGroupTypeKHR
{
- eGeneral = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV,
- eTrianglesHitGroup = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV,
- eProceduralHitGroup = VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV
+ eGeneral = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR,
+ eTrianglesHitGroup = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR,
+ eProceduralHitGroup = VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR
};
+ using RayTracingShaderGroupTypeNV = RayTracingShaderGroupTypeKHR;
- VULKAN_HPP_INLINE std::string to_string( RayTracingShaderGroupTypeNV value )
+ VULKAN_HPP_INLINE std::string to_string( RayTracingShaderGroupTypeKHR value )
{
switch ( value )
{
- case RayTracingShaderGroupTypeNV::eGeneral : return "General";
- case RayTracingShaderGroupTypeNV::eTrianglesHitGroup : return "TrianglesHitGroup";
- case RayTracingShaderGroupTypeNV::eProceduralHitGroup : return "ProceduralHitGroup";
+ case RayTracingShaderGroupTypeKHR::eGeneral : return "General";
+ case RayTracingShaderGroupTypeKHR::eTrianglesHitGroup : return "TrianglesHitGroup";
+ case RayTracingShaderGroupTypeKHR::eProceduralHitGroup : return "ProceduralHitGroup";
default: return "invalid";
}
}
- enum class RenderPassCreateFlagBits
- {};
+ enum class RenderPassCreateFlagBits : VkRenderPassCreateFlags
+ {
+ eTransformQCOM = VK_RENDER_PASS_CREATE_TRANSFORM_BIT_QCOM
+ };
- VULKAN_HPP_INLINE std::string to_string( RenderPassCreateFlagBits )
+ VULKAN_HPP_INLINE std::string to_string( RenderPassCreateFlagBits value )
{
- return "(void)";
+ switch ( value )
+ {
+ case RenderPassCreateFlagBits::eTransformQCOM : return "TransformQCOM";
+ default: return "invalid";
+ }
}
- enum class ResolveModeFlagBits
+ enum class ResolveModeFlagBits : VkResolveModeFlags
{
eNone = VK_RESOLVE_MODE_NONE,
eSampleZero = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT,
@@ -6690,14 +7467,21 @@ namespace VULKAN_HPP_NAMESPACE
eErrorIncompatibleDisplayKHR = VK_ERROR_INCOMPATIBLE_DISPLAY_KHR,
eErrorValidationFailedEXT = VK_ERROR_VALIDATION_FAILED_EXT,
eErrorInvalidShaderNV = VK_ERROR_INVALID_SHADER_NV,
+ eErrorIncompatibleVersionKHR = VK_ERROR_INCOMPATIBLE_VERSION_KHR,
eErrorInvalidDrmFormatModifierPlaneLayoutEXT = VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT,
eErrorNotPermittedEXT = VK_ERROR_NOT_PERMITTED_EXT,
eErrorFullScreenExclusiveModeLostEXT = VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT,
- eErrorOutOfPoolMemoryKHR = VK_ERROR_OUT_OF_POOL_MEMORY_KHR,
- eErrorInvalidExternalHandleKHR = VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR,
+ eThreadIdleKHR = VK_THREAD_IDLE_KHR,
+ eThreadDoneKHR = VK_THREAD_DONE_KHR,
+ eOperationDeferredKHR = VK_OPERATION_DEFERRED_KHR,
+ eOperationNotDeferredKHR = VK_OPERATION_NOT_DEFERRED_KHR,
+ ePipelineCompileRequiredEXT = VK_PIPELINE_COMPILE_REQUIRED_EXT,
eErrorFragmentationEXT = VK_ERROR_FRAGMENTATION_EXT,
eErrorInvalidDeviceAddressEXT = VK_ERROR_INVALID_DEVICE_ADDRESS_EXT,
- eErrorInvalidOpaqueCaptureAddressKHR = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS_KHR
+ eErrorInvalidExternalHandleKHR = VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR,
+ eErrorInvalidOpaqueCaptureAddressKHR = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS_KHR,
+ eErrorOutOfPoolMemoryKHR = VK_ERROR_OUT_OF_POOL_MEMORY_KHR,
+ eErrorPipelineCompileRequiredEXT = VK_ERROR_PIPELINE_COMPILE_REQUIRED_EXT
};
VULKAN_HPP_INLINE std::string to_string( Result value )
@@ -6734,14 +7518,20 @@ namespace VULKAN_HPP_NAMESPACE
case Result::eErrorIncompatibleDisplayKHR : return "ErrorIncompatibleDisplayKHR";
case Result::eErrorValidationFailedEXT : return "ErrorValidationFailedEXT";
case Result::eErrorInvalidShaderNV : return "ErrorInvalidShaderNV";
+ case Result::eErrorIncompatibleVersionKHR : return "ErrorIncompatibleVersionKHR";
case Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT : return "ErrorInvalidDrmFormatModifierPlaneLayoutEXT";
case Result::eErrorNotPermittedEXT : return "ErrorNotPermittedEXT";
case Result::eErrorFullScreenExclusiveModeLostEXT : return "ErrorFullScreenExclusiveModeLostEXT";
+ case Result::eThreadIdleKHR : return "ThreadIdleKHR";
+ case Result::eThreadDoneKHR : return "ThreadDoneKHR";
+ case Result::eOperationDeferredKHR : return "OperationDeferredKHR";
+ case Result::eOperationNotDeferredKHR : return "OperationNotDeferredKHR";
+ case Result::ePipelineCompileRequiredEXT : return "PipelineCompileRequiredEXT";
default: return "invalid";
}
}
- enum class SampleCountFlagBits
+ enum class SampleCountFlagBits : VkSampleCountFlags
{
e1 = VK_SAMPLE_COUNT_1_BIT,
e2 = VK_SAMPLE_COUNT_2_BIT,
@@ -6790,7 +7580,7 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class SamplerCreateFlagBits
+ enum class SamplerCreateFlagBits : VkSamplerCreateFlags
{
eSubsampledEXT = VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT,
eSubsampledCoarseReconstructionEXT = VK_SAMPLER_CREATE_SUBSAMPLED_COARSE_RECONSTRUCTION_BIT_EXT
@@ -6901,7 +7691,7 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class SemaphoreCreateFlagBits
+ enum class SemaphoreCreateFlagBits : VkSemaphoreCreateFlags
{};
VULKAN_HPP_INLINE std::string to_string( SemaphoreCreateFlagBits )
@@ -6909,7 +7699,7 @@ namespace VULKAN_HPP_NAMESPACE
return "(void)";
}
- enum class SemaphoreImportFlagBits
+ enum class SemaphoreImportFlagBits : VkSemaphoreImportFlags
{
eTemporary = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT
};
@@ -6941,7 +7731,7 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class SemaphoreWaitFlagBits
+ enum class SemaphoreWaitFlagBits : VkSemaphoreWaitFlags
{
eAny = VK_SEMAPHORE_WAIT_ANY_BIT
};
@@ -6956,7 +7746,7 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class ShaderCorePropertiesFlagBitsAMD
+ enum class ShaderCorePropertiesFlagBitsAMD : VkShaderCorePropertiesFlagsAMD
{};
VULKAN_HPP_INLINE std::string to_string( ShaderCorePropertiesFlagBitsAMD )
@@ -7001,7 +7791,7 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class ShaderModuleCreateFlagBits
+ enum class ShaderModuleCreateFlagBits : VkShaderModuleCreateFlags
{};
VULKAN_HPP_INLINE std::string to_string( ShaderModuleCreateFlagBits )
@@ -7009,7 +7799,7 @@ namespace VULKAN_HPP_NAMESPACE
return "(void)";
}
- enum class ShaderStageFlagBits
+ enum class ShaderStageFlagBits : VkShaderStageFlags
{
eVertex = VK_SHADER_STAGE_VERTEX_BIT,
eTessellationControl = VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT,
@@ -7019,14 +7809,20 @@ namespace VULKAN_HPP_NAMESPACE
eCompute = VK_SHADER_STAGE_COMPUTE_BIT,
eAllGraphics = VK_SHADER_STAGE_ALL_GRAPHICS,
eAll = VK_SHADER_STAGE_ALL,
- eRaygenNV = VK_SHADER_STAGE_RAYGEN_BIT_NV,
+ eRaygenKHR = VK_SHADER_STAGE_RAYGEN_BIT_KHR,
+ eAnyHitKHR = VK_SHADER_STAGE_ANY_HIT_BIT_KHR,
+ eClosestHitKHR = VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR,
+ eMissKHR = VK_SHADER_STAGE_MISS_BIT_KHR,
+ eIntersectionKHR = VK_SHADER_STAGE_INTERSECTION_BIT_KHR,
+ eCallableKHR = VK_SHADER_STAGE_CALLABLE_BIT_KHR,
+ eTaskNV = VK_SHADER_STAGE_TASK_BIT_NV,
+ eMeshNV = VK_SHADER_STAGE_MESH_BIT_NV,
eAnyHitNV = VK_SHADER_STAGE_ANY_HIT_BIT_NV,
+ eCallableNV = VK_SHADER_STAGE_CALLABLE_BIT_NV,
eClosestHitNV = VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV,
- eMissNV = VK_SHADER_STAGE_MISS_BIT_NV,
eIntersectionNV = VK_SHADER_STAGE_INTERSECTION_BIT_NV,
- eCallableNV = VK_SHADER_STAGE_CALLABLE_BIT_NV,
- eTaskNV = VK_SHADER_STAGE_TASK_BIT_NV,
- eMeshNV = VK_SHADER_STAGE_MESH_BIT_NV
+ eMissNV = VK_SHADER_STAGE_MISS_BIT_NV,
+ eRaygenNV = VK_SHADER_STAGE_RAYGEN_BIT_NV
};
VULKAN_HPP_INLINE std::string to_string( ShaderStageFlagBits value )
@@ -7041,12 +7837,12 @@ namespace VULKAN_HPP_NAMESPACE
case ShaderStageFlagBits::eCompute : return "Compute";
case ShaderStageFlagBits::eAllGraphics : return "AllGraphics";
case ShaderStageFlagBits::eAll : return "All";
- case ShaderStageFlagBits::eRaygenNV : return "RaygenNV";
- case ShaderStageFlagBits::eAnyHitNV : return "AnyHitNV";
- case ShaderStageFlagBits::eClosestHitNV : return "ClosestHitNV";
- case ShaderStageFlagBits::eMissNV : return "MissNV";
- case ShaderStageFlagBits::eIntersectionNV : return "IntersectionNV";
- case ShaderStageFlagBits::eCallableNV : return "CallableNV";
+ case ShaderStageFlagBits::eRaygenKHR : return "RaygenKHR";
+ case ShaderStageFlagBits::eAnyHitKHR : return "AnyHitKHR";
+ case ShaderStageFlagBits::eClosestHitKHR : return "ClosestHitKHR";
+ case ShaderStageFlagBits::eMissKHR : return "MissKHR";
+ case ShaderStageFlagBits::eIntersectionKHR : return "IntersectionKHR";
+ case ShaderStageFlagBits::eCallableKHR : return "CallableKHR";
case ShaderStageFlagBits::eTaskNV : return "TaskNV";
case ShaderStageFlagBits::eMeshNV : return "MeshNV";
default: return "invalid";
@@ -7105,7 +7901,7 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class SparseImageFormatFlagBits
+ enum class SparseImageFormatFlagBits : VkSparseImageFormatFlags
{
eSingleMiptail = VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT,
eAlignedMipSize = VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT,
@@ -7123,7 +7919,7 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class SparseMemoryBindFlagBits
+ enum class SparseMemoryBindFlagBits : VkSparseMemoryBindFlags
{
eMetadata = VK_SPARSE_MEMORY_BIND_METADATA_BIT
};
@@ -7137,7 +7933,7 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class StencilFaceFlagBits
+ enum class StencilFaceFlagBits : VkStencilFaceFlags
{
eFront = VK_STENCIL_FACE_FRONT_BIT,
eBack = VK_STENCIL_FACE_BACK_BIT,
@@ -7378,6 +8174,7 @@ namespace VULKAN_HPP_NAMESPACE
ePhysicalDeviceTransformFeedbackPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT,
ePipelineRasterizationStateStreamCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT,
eImageViewHandleInfoNVX = VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX,
+ eImageViewAddressPropertiesNVX = VK_STRUCTURE_TYPE_IMAGE_VIEW_ADDRESS_PROPERTIES_NVX,
eTextureLodGatherFormatPropertiesAMD = VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD,
eStreamDescriptorSurfaceCreateInfoGGP = VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP,
ePhysicalDeviceCornerSampledImageFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV,
@@ -7410,12 +8207,6 @@ namespace VULKAN_HPP_NAMESPACE
ePhysicalDeviceConditionalRenderingFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT,
eConditionalRenderingBeginInfoEXT = VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT,
ePresentRegionsKHR = VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR,
- eObjectTableCreateInfoNVX = VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX,
- eIndirectCommandsLayoutCreateInfoNVX = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX,
- eCmdProcessCommandsInfoNVX = VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX,
- eCmdReserveSpaceForCommandsInfoNVX = VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX,
- eDeviceGeneratedCommandsLimitsNVX = VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX,
- eDeviceGeneratedCommandsFeaturesNVX = VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX,
ePipelineViewportWScalingStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV,
eSurfaceCapabilities2EXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT,
eDisplayPowerInfoEXT = VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT,
@@ -7479,6 +8270,26 @@ namespace VULKAN_HPP_NAMESPACE
ePhysicalDeviceBlendOperationAdvancedPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT,
ePipelineColorBlendAdvancedStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT,
ePipelineCoverageToColorStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV,
+ eBindAccelerationStructureMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_KHR,
+ eWriteDescriptorSetAccelerationStructureKHR = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR,
+ eAccelerationStructureBuildGeometryInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_GEOMETRY_INFO_KHR,
+ eAccelerationStructureCreateGeometryTypeInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_GEOMETRY_TYPE_INFO_KHR,
+ eAccelerationStructureDeviceAddressInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_DEVICE_ADDRESS_INFO_KHR,
+ eAccelerationStructureGeometryAabbsDataKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_AABBS_DATA_KHR,
+ eAccelerationStructureGeometryInstancesDataKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_INSTANCES_DATA_KHR,
+ eAccelerationStructureGeometryTrianglesDataKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_TRIANGLES_DATA_KHR,
+ eAccelerationStructureGeometryKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_KHR,
+ eAccelerationStructureMemoryRequirementsInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_KHR,
+ eAccelerationStructureVersionKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_VERSION_KHR,
+ eCopyAccelerationStructureInfoKHR = VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_INFO_KHR,
+ eCopyAccelerationStructureToMemoryInfoKHR = VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_TO_MEMORY_INFO_KHR,
+ eCopyMemoryToAccelerationStructureInfoKHR = VK_STRUCTURE_TYPE_COPY_MEMORY_TO_ACCELERATION_STRUCTURE_INFO_KHR,
+ ePhysicalDeviceRayTracingFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_FEATURES_KHR,
+ ePhysicalDeviceRayTracingPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_KHR,
+ eRayTracingPipelineCreateInfoKHR = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_KHR,
+ eRayTracingShaderGroupCreateInfoKHR = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR,
+ eAccelerationStructureCreateInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_KHR,
+ eRayTracingPipelineInterfaceCreateInfoKHR = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_INTERFACE_CREATE_INFO_KHR,
ePipelineCoverageModulationStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV,
ePhysicalDeviceShaderSmBuiltinsFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV,
ePhysicalDeviceShaderSmBuiltinsPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV,
@@ -7490,6 +8301,8 @@ namespace VULKAN_HPP_NAMESPACE
eImageDrmFormatModifierPropertiesEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT,
eValidationCacheCreateInfoEXT = VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT,
eShaderModuleValidationCacheCreateInfoEXT = VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT,
+ ePhysicalDevicePortabilitySubsetFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_FEATURES_KHR,
+ ePhysicalDevicePortabilitySubsetPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_PROPERTIES_KHR,
ePipelineViewportShadingRateImageStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV,
ePhysicalDeviceShadingRateImageFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV,
ePhysicalDeviceShadingRateImagePropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV,
@@ -7499,8 +8312,6 @@ namespace VULKAN_HPP_NAMESPACE
eGeometryNV = VK_STRUCTURE_TYPE_GEOMETRY_NV,
eGeometryTrianglesNV = VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV,
eGeometryAabbNV = VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV,
- eBindAccelerationStructureMemoryInfoNV = VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV,
- eWriteDescriptorSetAccelerationStructureNV = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV,
eAccelerationStructureMemoryRequirementsInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV,
ePhysicalDeviceRayTracingPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV,
eRayTracingShaderGroupCreateInfoNV = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV,
@@ -7533,7 +8344,7 @@ namespace VULKAN_HPP_NAMESPACE
eCheckpointDataNV = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV,
eQueueFamilyCheckpointPropertiesNV = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV,
ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL,
- eQueryPoolCreateInfoINTEL = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL,
+ eQueryPoolPerformanceQueryCreateInfoINTEL = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL,
eInitializePerformanceApiInfoINTEL = VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL,
ePerformanceMarkerInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL,
ePerformanceStreamMarkerInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL,
@@ -7576,7 +8387,10 @@ namespace VULKAN_HPP_NAMESPACE
ePhysicalDeviceLineRasterizationFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT,
ePipelineRasterizationLineStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT,
ePhysicalDeviceLineRasterizationPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT,
+ ePhysicalDeviceShaderAtomicFloatFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT,
ePhysicalDeviceIndexTypeUint8FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT,
+ ePhysicalDeviceExtendedDynamicStateFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT,
+ eDeferredOperationInfoKHR = VK_STRUCTURE_TYPE_DEFERRED_OPERATION_INFO_KHR,
ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR,
ePipelineInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR,
ePipelineExecutablePropertiesKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR,
@@ -7584,120 +8398,161 @@ namespace VULKAN_HPP_NAMESPACE
ePipelineExecutableStatisticKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR,
ePipelineExecutableInternalRepresentationKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR,
ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT,
+ ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_NV,
+ eGraphicsShaderGroupCreateInfoNV = VK_STRUCTURE_TYPE_GRAPHICS_SHADER_GROUP_CREATE_INFO_NV,
+ eGraphicsPipelineShaderGroupsCreateInfoNV = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_SHADER_GROUPS_CREATE_INFO_NV,
+ eIndirectCommandsLayoutTokenNV = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_TOKEN_NV,
+ eIndirectCommandsLayoutCreateInfoNV = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NV,
+ eGeneratedCommandsInfoNV = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_INFO_NV,
+ eGeneratedCommandsMemoryRequirementsInfoNV = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_NV,
+ ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_NV,
ePhysicalDeviceTexelBufferAlignmentFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT,
ePhysicalDeviceTexelBufferAlignmentPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT,
- ePhysicalDeviceVariablePointerFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES,
- ePhysicalDeviceShaderDrawParameterFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES,
- eDebugReportCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT,
- eRenderPassMultiviewCreateInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO_KHR,
- ePhysicalDeviceMultiviewFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHR,
- ePhysicalDeviceMultiviewPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHR,
- ePhysicalDeviceFeatures2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR,
- ePhysicalDeviceProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR,
- eFormatProperties2KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2_KHR,
- eImageFormatProperties2KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR,
- ePhysicalDeviceImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR,
- eQueueFamilyProperties2KHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR,
- ePhysicalDeviceMemoryProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR,
- eSparseImageFormatProperties2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2_KHR,
- ePhysicalDeviceSparseImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2_KHR,
- eMemoryAllocateFlagsInfoKHR = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR,
- eDeviceGroupRenderPassBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO_KHR,
- eDeviceGroupCommandBufferBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO_KHR,
- eDeviceGroupSubmitInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO_KHR,
- eDeviceGroupBindSparseInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO_KHR,
+ eCommandBufferInheritanceRenderPassTransformInfoQCOM = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDER_PASS_TRANSFORM_INFO_QCOM,
+ eRenderPassTransformBeginInfoQCOM = VK_STRUCTURE_TYPE_RENDER_PASS_TRANSFORM_BEGIN_INFO_QCOM,
+ ePhysicalDeviceRobustness2FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT,
+ ePhysicalDeviceRobustness2PropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_PROPERTIES_EXT,
+ eSamplerCustomBorderColorCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT,
+ ePhysicalDeviceCustomBorderColorPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT,
+ ePhysicalDeviceCustomBorderColorFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT,
+ ePipelineLibraryCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_LIBRARY_CREATE_INFO_KHR,
+ ePhysicalDevicePrivateDataFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES_EXT,
+ eDevicePrivateDataCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO_EXT,
+ ePrivateDataSlotCreateInfoEXT = VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO_EXT,
+ ePhysicalDevicePipelineCreationCacheControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES_EXT,
+ ePhysicalDeviceDiagnosticsConfigFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV,
+ eDeviceDiagnosticsConfigCreateInfoNV = VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV,
+ ePhysicalDeviceFragmentDensityMap2FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_FEATURES_EXT,
+ ePhysicalDeviceFragmentDensityMap2PropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_PROPERTIES_EXT,
+ ePhysicalDeviceImageRobustnessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES_EXT,
+ eCopyBufferInfo2KHR = VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2_KHR,
+ eCopyImageInfo2KHR = VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2_KHR,
+ eCopyBufferToImageInfo2KHR = VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2_KHR,
+ eCopyImageToBufferInfo2KHR = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2_KHR,
+ eBlitImageInfo2KHR = VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2_KHR,
+ eResolveImageInfo2KHR = VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2_KHR,
+ eBufferCopy2KHR = VK_STRUCTURE_TYPE_BUFFER_COPY_2_KHR,
+ eImageCopy2KHR = VK_STRUCTURE_TYPE_IMAGE_COPY_2_KHR,
+ eImageBlit2KHR = VK_STRUCTURE_TYPE_IMAGE_BLIT_2_KHR,
+ eBufferImageCopy2KHR = VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2_KHR,
+ eImageResolve2KHR = VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2_KHR,
+ ePhysicalDevice4444FormatsFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT,
+ eDirectfbSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_DIRECTFB_SURFACE_CREATE_INFO_EXT,
+ eAttachmentDescription2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR,
+ eAttachmentDescriptionStencilLayoutKHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR,
+ eAttachmentReference2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR,
+ eAttachmentReferenceStencilLayoutKHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR,
+ eBindAccelerationStructureMemoryInfoNV = VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV,
eBindBufferMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO_KHR,
+ eBindBufferMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR,
eBindImageMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO_KHR,
- ePhysicalDeviceGroupPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR,
+ eBindImageMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR,
+ eBindImagePlaneMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR,
+ eBufferDeviceAddressInfoEXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT,
+ eBufferDeviceAddressInfoKHR = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR,
+ eBufferMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR,
+ eBufferOpaqueCaptureAddressCreateInfoKHR = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR,
+ eDebugReportCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT,
+ eDescriptorSetLayoutBindingFlagsCreateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT,
+ eDescriptorSetLayoutSupportKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT_KHR,
+ eDescriptorSetVariableDescriptorCountAllocateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT,
+ eDescriptorSetVariableDescriptorCountLayoutSupportEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT,
+ eDescriptorUpdateTemplateCreateInfoKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR,
+ eDeviceGroupBindSparseInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO_KHR,
+ eDeviceGroupCommandBufferBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO_KHR,
eDeviceGroupDeviceCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO_KHR,
- ePhysicalDeviceExternalImageFormatInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO_KHR,
- eExternalImageFormatPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR,
- ePhysicalDeviceExternalBufferInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO_KHR,
+ eDeviceGroupRenderPassBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO_KHR,
+ eDeviceGroupSubmitInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO_KHR,
+ eDeviceMemoryOpaqueCaptureAddressInfoKHR = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR,
+ eExportFenceCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR,
+ eExportMemoryAllocateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR,
+ eExportSemaphoreCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR,
eExternalBufferPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES_KHR,
- ePhysicalDeviceIdPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES_KHR,
+ eExternalFencePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES_KHR,
+ eExternalImageFormatPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR,
eExternalMemoryBufferCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR,
eExternalMemoryImageCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_KHR,
- eExportMemoryAllocateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR,
- ePhysicalDeviceExternalSemaphoreInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR,
eExternalSemaphorePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR,
- eExportSemaphoreCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR,
- ePhysicalDeviceShaderFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR,
- ePhysicalDeviceFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR,
- ePhysicalDevice16BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR,
- eDescriptorUpdateTemplateCreateInfoKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR,
- ePhysicalDeviceImagelessFramebufferFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR,
+ eFormatProperties2KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2_KHR,
eFramebufferAttachmentsCreateInfoKHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR,
eFramebufferAttachmentImageInfoKHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR,
- eRenderPassAttachmentBeginInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR,
- eAttachmentDescription2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR,
- eAttachmentReference2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR,
- eSubpassDescription2KHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR,
- eSubpassDependency2KHR = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR,
- eRenderPassCreateInfo2KHR = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR,
- eSubpassBeginInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR,
- eSubpassEndInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR,
- ePhysicalDeviceExternalFenceInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR,
- eExternalFencePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES_KHR,
- eExportFenceCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR,
- ePhysicalDevicePointClippingPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES_KHR,
- eRenderPassInputAttachmentAspectCreateInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO_KHR,
- eImageViewUsageCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR,
- ePipelineTessellationDomainOriginStateCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO_KHR,
- ePhysicalDeviceVariablePointerFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR,
- ePhysicalDeviceVariablePointersFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR,
- eMemoryDedicatedRequirementsKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR,
- eMemoryDedicatedAllocateInfoKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR,
- ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT,
- eSamplerReductionModeCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT,
- eBufferMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR,
+ eImageFormatListCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR,
+ eImageFormatProperties2KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR,
eImageMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR,
+ eImagePlaneMemoryRequirementsInfoKHR = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO_KHR,
eImageSparseMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2_KHR,
+ eImageStencilUsageCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT,
+ eImageViewUsageCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR,
+ eMemoryAllocateFlagsInfoKHR = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR,
+ eMemoryDedicatedAllocateInfoKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR,
+ eMemoryDedicatedRequirementsKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR,
+ eMemoryOpaqueCaptureAddressAllocateInfoKHR = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR,
eMemoryRequirements2KHR = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR,
- eSparseImageMemoryRequirements2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2_KHR,
- eImageFormatListCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR,
- eSamplerYcbcrConversionCreateInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO_KHR,
- eSamplerYcbcrConversionInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO_KHR,
- eBindImagePlaneMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR,
- eImagePlaneMemoryRequirementsInfoKHR = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO_KHR,
- ePhysicalDeviceSamplerYcbcrConversionFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES_KHR,
- eSamplerYcbcrConversionImageFormatPropertiesKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES_KHR,
- eBindBufferMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR,
- eBindImageMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR,
- eDescriptorSetLayoutBindingFlagsCreateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT,
+ ePhysicalDevice16BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR,
+ ePhysicalDevice8BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR,
+ ePhysicalDeviceBufferAddressFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_ADDRESS_FEATURES_EXT,
+ ePhysicalDeviceBufferDeviceAddressFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR,
+ ePhysicalDeviceDepthStencilResolvePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR,
ePhysicalDeviceDescriptorIndexingFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT,
ePhysicalDeviceDescriptorIndexingPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT,
- eDescriptorSetVariableDescriptorCountAllocateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT,
- eDescriptorSetVariableDescriptorCountLayoutSupportEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT,
- ePhysicalDeviceMaintenance3PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES_KHR,
- eDescriptorSetLayoutSupportKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT_KHR,
- ePhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR,
- ePhysicalDevice8BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR,
- ePhysicalDeviceShaderAtomicInt64FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR,
ePhysicalDeviceDriverPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR,
+ ePhysicalDeviceExternalBufferInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO_KHR,
+ ePhysicalDeviceExternalFenceInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR,
+ ePhysicalDeviceExternalImageFormatInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO_KHR,
+ ePhysicalDeviceExternalSemaphoreInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR,
+ ePhysicalDeviceFeatures2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR,
+ ePhysicalDeviceFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR,
ePhysicalDeviceFloatControlsPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR,
- ePhysicalDeviceDepthStencilResolvePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR,
- eSubpassDescriptionDepthStencilResolveKHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR,
+ ePhysicalDeviceGroupPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR,
+ ePhysicalDeviceHostQueryResetFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT,
+ ePhysicalDeviceIdPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES_KHR,
+ ePhysicalDeviceImagelessFramebufferFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR,
+ ePhysicalDeviceImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR,
+ ePhysicalDeviceMaintenance3PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES_KHR,
+ ePhysicalDeviceMemoryProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR,
+ ePhysicalDeviceMultiviewFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHR,
+ ePhysicalDeviceMultiviewPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHR,
+ ePhysicalDevicePointClippingPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES_KHR,
+ ePhysicalDeviceProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR,
+ ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT,
+ ePhysicalDeviceSamplerYcbcrConversionFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES_KHR,
+ ePhysicalDeviceScalarBlockLayoutFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT,
+ ePhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR,
+ ePhysicalDeviceShaderAtomicInt64FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR,
+ ePhysicalDeviceShaderDrawParameterFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES,
+ ePhysicalDeviceShaderFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR,
+ ePhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR,
+ ePhysicalDeviceSparseImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2_KHR,
ePhysicalDeviceTimelineSemaphoreFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR,
ePhysicalDeviceTimelineSemaphorePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR,
+ ePhysicalDeviceUniformBufferStandardLayoutFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR,
+ ePhysicalDeviceVariablePointersFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR,
+ ePhysicalDeviceVariablePointerFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES,
+ ePhysicalDeviceVariablePointerFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR,
+ ePhysicalDeviceVulkanMemoryModelFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR,
+ ePipelineTessellationDomainOriginStateCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO_KHR,
+ eQueryPoolCreateInfoINTEL = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL,
+ eQueueFamilyProperties2KHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR,
+ eRenderPassAttachmentBeginInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR,
+ eRenderPassCreateInfo2KHR = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR,
+ eRenderPassInputAttachmentAspectCreateInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO_KHR,
+ eRenderPassMultiviewCreateInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO_KHR,
+ eSamplerReductionModeCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT,
+ eSamplerYcbcrConversionCreateInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO_KHR,
+ eSamplerYcbcrConversionImageFormatPropertiesKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES_KHR,
+ eSamplerYcbcrConversionInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO_KHR,
+ eSemaphoreSignalInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR,
eSemaphoreTypeCreateInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR,
- eTimelineSemaphoreSubmitInfoKHR = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR,
eSemaphoreWaitInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR,
- eSemaphoreSignalInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR,
- ePhysicalDeviceVulkanMemoryModelFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR,
- ePhysicalDeviceScalarBlockLayoutFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT,
- ePhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR,
- eAttachmentReferenceStencilLayoutKHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR,
- eAttachmentDescriptionStencilLayoutKHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR,
- ePhysicalDeviceBufferAddressFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_ADDRESS_FEATURES_EXT,
- eBufferDeviceAddressInfoEXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT,
- eImageStencilUsageCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT,
- ePhysicalDeviceUniformBufferStandardLayoutFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR,
- ePhysicalDeviceBufferDeviceAddressFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR,
- eBufferDeviceAddressInfoKHR = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR,
- eBufferOpaqueCaptureAddressCreateInfoKHR = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR,
- eMemoryOpaqueCaptureAddressAllocateInfoKHR = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR,
- eDeviceMemoryOpaqueCaptureAddressInfoKHR = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR,
- ePhysicalDeviceHostQueryResetFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT
+ eSparseImageFormatProperties2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2_KHR,
+ eSparseImageMemoryRequirements2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2_KHR,
+ eSubpassBeginInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR,
+ eSubpassDependency2KHR = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR,
+ eSubpassDescription2KHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR,
+ eSubpassDescriptionDepthStencilResolveKHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR,
+ eSubpassEndInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR,
+ eTimelineSemaphoreSubmitInfoKHR = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR,
+ eWriteDescriptorSetAccelerationStructureNV = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV
};
VULKAN_HPP_INLINE std::string to_string( StructureType value )
@@ -7896,6 +8751,7 @@ namespace VULKAN_HPP_NAMESPACE
case StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT : return "PhysicalDeviceTransformFeedbackPropertiesEXT";
case StructureType::ePipelineRasterizationStateStreamCreateInfoEXT : return "PipelineRasterizationStateStreamCreateInfoEXT";
case StructureType::eImageViewHandleInfoNVX : return "ImageViewHandleInfoNVX";
+ case StructureType::eImageViewAddressPropertiesNVX : return "ImageViewAddressPropertiesNVX";
case StructureType::eTextureLodGatherFormatPropertiesAMD : return "TextureLodGatherFormatPropertiesAMD";
case StructureType::eStreamDescriptorSurfaceCreateInfoGGP : return "StreamDescriptorSurfaceCreateInfoGGP";
case StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV : return "PhysicalDeviceCornerSampledImageFeaturesNV";
@@ -7928,12 +8784,6 @@ namespace VULKAN_HPP_NAMESPACE
case StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT : return "PhysicalDeviceConditionalRenderingFeaturesEXT";
case StructureType::eConditionalRenderingBeginInfoEXT : return "ConditionalRenderingBeginInfoEXT";
case StructureType::ePresentRegionsKHR : return "PresentRegionsKHR";
- case StructureType::eObjectTableCreateInfoNVX : return "ObjectTableCreateInfoNVX";
- case StructureType::eIndirectCommandsLayoutCreateInfoNVX : return "IndirectCommandsLayoutCreateInfoNVX";
- case StructureType::eCmdProcessCommandsInfoNVX : return "CmdProcessCommandsInfoNVX";
- case StructureType::eCmdReserveSpaceForCommandsInfoNVX : return "CmdReserveSpaceForCommandsInfoNVX";
- case StructureType::eDeviceGeneratedCommandsLimitsNVX : return "DeviceGeneratedCommandsLimitsNVX";
- case StructureType::eDeviceGeneratedCommandsFeaturesNVX : return "DeviceGeneratedCommandsFeaturesNVX";
case StructureType::ePipelineViewportWScalingStateCreateInfoNV : return "PipelineViewportWScalingStateCreateInfoNV";
case StructureType::eSurfaceCapabilities2EXT : return "SurfaceCapabilities2EXT";
case StructureType::eDisplayPowerInfoEXT : return "DisplayPowerInfoEXT";
@@ -7997,6 +8847,26 @@ namespace VULKAN_HPP_NAMESPACE
case StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT : return "PhysicalDeviceBlendOperationAdvancedPropertiesEXT";
case StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT : return "PipelineColorBlendAdvancedStateCreateInfoEXT";
case StructureType::ePipelineCoverageToColorStateCreateInfoNV : return "PipelineCoverageToColorStateCreateInfoNV";
+ case StructureType::eBindAccelerationStructureMemoryInfoKHR : return "BindAccelerationStructureMemoryInfoKHR";
+ case StructureType::eWriteDescriptorSetAccelerationStructureKHR : return "WriteDescriptorSetAccelerationStructureKHR";
+ case StructureType::eAccelerationStructureBuildGeometryInfoKHR : return "AccelerationStructureBuildGeometryInfoKHR";
+ case StructureType::eAccelerationStructureCreateGeometryTypeInfoKHR : return "AccelerationStructureCreateGeometryTypeInfoKHR";
+ case StructureType::eAccelerationStructureDeviceAddressInfoKHR : return "AccelerationStructureDeviceAddressInfoKHR";
+ case StructureType::eAccelerationStructureGeometryAabbsDataKHR : return "AccelerationStructureGeometryAabbsDataKHR";
+ case StructureType::eAccelerationStructureGeometryInstancesDataKHR : return "AccelerationStructureGeometryInstancesDataKHR";
+ case StructureType::eAccelerationStructureGeometryTrianglesDataKHR : return "AccelerationStructureGeometryTrianglesDataKHR";
+ case StructureType::eAccelerationStructureGeometryKHR : return "AccelerationStructureGeometryKHR";
+ case StructureType::eAccelerationStructureMemoryRequirementsInfoKHR : return "AccelerationStructureMemoryRequirementsInfoKHR";
+ case StructureType::eAccelerationStructureVersionKHR : return "AccelerationStructureVersionKHR";
+ case StructureType::eCopyAccelerationStructureInfoKHR : return "CopyAccelerationStructureInfoKHR";
+ case StructureType::eCopyAccelerationStructureToMemoryInfoKHR : return "CopyAccelerationStructureToMemoryInfoKHR";
+ case StructureType::eCopyMemoryToAccelerationStructureInfoKHR : return "CopyMemoryToAccelerationStructureInfoKHR";
+ case StructureType::ePhysicalDeviceRayTracingFeaturesKHR : return "PhysicalDeviceRayTracingFeaturesKHR";
+ case StructureType::ePhysicalDeviceRayTracingPropertiesKHR : return "PhysicalDeviceRayTracingPropertiesKHR";
+ case StructureType::eRayTracingPipelineCreateInfoKHR : return "RayTracingPipelineCreateInfoKHR";
+ case StructureType::eRayTracingShaderGroupCreateInfoKHR : return "RayTracingShaderGroupCreateInfoKHR";
+ case StructureType::eAccelerationStructureCreateInfoKHR : return "AccelerationStructureCreateInfoKHR";
+ case StructureType::eRayTracingPipelineInterfaceCreateInfoKHR : return "RayTracingPipelineInterfaceCreateInfoKHR";
case StructureType::ePipelineCoverageModulationStateCreateInfoNV : return "PipelineCoverageModulationStateCreateInfoNV";
case StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV : return "PhysicalDeviceShaderSmBuiltinsFeaturesNV";
case StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV : return "PhysicalDeviceShaderSmBuiltinsPropertiesNV";
@@ -8008,6 +8878,8 @@ namespace VULKAN_HPP_NAMESPACE
case StructureType::eImageDrmFormatModifierPropertiesEXT : return "ImageDrmFormatModifierPropertiesEXT";
case StructureType::eValidationCacheCreateInfoEXT : return "ValidationCacheCreateInfoEXT";
case StructureType::eShaderModuleValidationCacheCreateInfoEXT : return "ShaderModuleValidationCacheCreateInfoEXT";
+ case StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR : return "PhysicalDevicePortabilitySubsetFeaturesKHR";
+ case StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR : return "PhysicalDevicePortabilitySubsetPropertiesKHR";
case StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV : return "PipelineViewportShadingRateImageStateCreateInfoNV";
case StructureType::ePhysicalDeviceShadingRateImageFeaturesNV : return "PhysicalDeviceShadingRateImageFeaturesNV";
case StructureType::ePhysicalDeviceShadingRateImagePropertiesNV : return "PhysicalDeviceShadingRateImagePropertiesNV";
@@ -8017,8 +8889,6 @@ namespace VULKAN_HPP_NAMESPACE
case StructureType::eGeometryNV : return "GeometryNV";
case StructureType::eGeometryTrianglesNV : return "GeometryTrianglesNV";
case StructureType::eGeometryAabbNV : return "GeometryAabbNV";
- case StructureType::eBindAccelerationStructureMemoryInfoNV : return "BindAccelerationStructureMemoryInfoNV";
- case StructureType::eWriteDescriptorSetAccelerationStructureNV : return "WriteDescriptorSetAccelerationStructureNV";
case StructureType::eAccelerationStructureMemoryRequirementsInfoNV : return "AccelerationStructureMemoryRequirementsInfoNV";
case StructureType::ePhysicalDeviceRayTracingPropertiesNV : return "PhysicalDeviceRayTracingPropertiesNV";
case StructureType::eRayTracingShaderGroupCreateInfoNV : return "RayTracingShaderGroupCreateInfoNV";
@@ -8051,7 +8921,7 @@ namespace VULKAN_HPP_NAMESPACE
case StructureType::eCheckpointDataNV : return "CheckpointDataNV";
case StructureType::eQueueFamilyCheckpointPropertiesNV : return "QueueFamilyCheckpointPropertiesNV";
case StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL : return "PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL";
- case StructureType::eQueryPoolCreateInfoINTEL : return "QueryPoolCreateInfoINTEL";
+ case StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL : return "QueryPoolPerformanceQueryCreateInfoINTEL";
case StructureType::eInitializePerformanceApiInfoINTEL : return "InitializePerformanceApiInfoINTEL";
case StructureType::ePerformanceMarkerInfoINTEL : return "PerformanceMarkerInfoINTEL";
case StructureType::ePerformanceStreamMarkerInfoINTEL : return "PerformanceStreamMarkerInfoINTEL";
@@ -8094,7 +8964,10 @@ namespace VULKAN_HPP_NAMESPACE
case StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT : return "PhysicalDeviceLineRasterizationFeaturesEXT";
case StructureType::ePipelineRasterizationLineStateCreateInfoEXT : return "PipelineRasterizationLineStateCreateInfoEXT";
case StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT : return "PhysicalDeviceLineRasterizationPropertiesEXT";
+ case StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT : return "PhysicalDeviceShaderAtomicFloatFeaturesEXT";
case StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT : return "PhysicalDeviceIndexTypeUint8FeaturesEXT";
+ case StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT : return "PhysicalDeviceExtendedDynamicStateFeaturesEXT";
+ case StructureType::eDeferredOperationInfoKHR : return "DeferredOperationInfoKHR";
case StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR : return "PhysicalDevicePipelineExecutablePropertiesFeaturesKHR";
case StructureType::ePipelineInfoKHR : return "PipelineInfoKHR";
case StructureType::ePipelineExecutablePropertiesKHR : return "PipelineExecutablePropertiesKHR";
@@ -8102,13 +8975,51 @@ namespace VULKAN_HPP_NAMESPACE
case StructureType::ePipelineExecutableStatisticKHR : return "PipelineExecutableStatisticKHR";
case StructureType::ePipelineExecutableInternalRepresentationKHR : return "PipelineExecutableInternalRepresentationKHR";
case StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT : return "PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT";
+ case StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV : return "PhysicalDeviceDeviceGeneratedCommandsPropertiesNV";
+ case StructureType::eGraphicsShaderGroupCreateInfoNV : return "GraphicsShaderGroupCreateInfoNV";
+ case StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV : return "GraphicsPipelineShaderGroupsCreateInfoNV";
+ case StructureType::eIndirectCommandsLayoutTokenNV : return "IndirectCommandsLayoutTokenNV";
+ case StructureType::eIndirectCommandsLayoutCreateInfoNV : return "IndirectCommandsLayoutCreateInfoNV";
+ case StructureType::eGeneratedCommandsInfoNV : return "GeneratedCommandsInfoNV";
+ case StructureType::eGeneratedCommandsMemoryRequirementsInfoNV : return "GeneratedCommandsMemoryRequirementsInfoNV";
+ case StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV : return "PhysicalDeviceDeviceGeneratedCommandsFeaturesNV";
case StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT : return "PhysicalDeviceTexelBufferAlignmentFeaturesEXT";
case StructureType::ePhysicalDeviceTexelBufferAlignmentPropertiesEXT : return "PhysicalDeviceTexelBufferAlignmentPropertiesEXT";
+ case StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM : return "CommandBufferInheritanceRenderPassTransformInfoQCOM";
+ case StructureType::eRenderPassTransformBeginInfoQCOM : return "RenderPassTransformBeginInfoQCOM";
+ case StructureType::ePhysicalDeviceRobustness2FeaturesEXT : return "PhysicalDeviceRobustness2FeaturesEXT";
+ case StructureType::ePhysicalDeviceRobustness2PropertiesEXT : return "PhysicalDeviceRobustness2PropertiesEXT";
+ case StructureType::eSamplerCustomBorderColorCreateInfoEXT : return "SamplerCustomBorderColorCreateInfoEXT";
+ case StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT : return "PhysicalDeviceCustomBorderColorPropertiesEXT";
+ case StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT : return "PhysicalDeviceCustomBorderColorFeaturesEXT";
+ case StructureType::ePipelineLibraryCreateInfoKHR : return "PipelineLibraryCreateInfoKHR";
+ case StructureType::ePhysicalDevicePrivateDataFeaturesEXT : return "PhysicalDevicePrivateDataFeaturesEXT";
+ case StructureType::eDevicePrivateDataCreateInfoEXT : return "DevicePrivateDataCreateInfoEXT";
+ case StructureType::ePrivateDataSlotCreateInfoEXT : return "PrivateDataSlotCreateInfoEXT";
+ case StructureType::ePhysicalDevicePipelineCreationCacheControlFeaturesEXT : return "PhysicalDevicePipelineCreationCacheControlFeaturesEXT";
+ case StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV : return "PhysicalDeviceDiagnosticsConfigFeaturesNV";
+ case StructureType::eDeviceDiagnosticsConfigCreateInfoNV : return "DeviceDiagnosticsConfigCreateInfoNV";
+ case StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT : return "PhysicalDeviceFragmentDensityMap2FeaturesEXT";
+ case StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT : return "PhysicalDeviceFragmentDensityMap2PropertiesEXT";
+ case StructureType::ePhysicalDeviceImageRobustnessFeaturesEXT : return "PhysicalDeviceImageRobustnessFeaturesEXT";
+ case StructureType::eCopyBufferInfo2KHR : return "CopyBufferInfo2KHR";
+ case StructureType::eCopyImageInfo2KHR : return "CopyImageInfo2KHR";
+ case StructureType::eCopyBufferToImageInfo2KHR : return "CopyBufferToImageInfo2KHR";
+ case StructureType::eCopyImageToBufferInfo2KHR : return "CopyImageToBufferInfo2KHR";
+ case StructureType::eBlitImageInfo2KHR : return "BlitImageInfo2KHR";
+ case StructureType::eResolveImageInfo2KHR : return "ResolveImageInfo2KHR";
+ case StructureType::eBufferCopy2KHR : return "BufferCopy2KHR";
+ case StructureType::eImageCopy2KHR : return "ImageCopy2KHR";
+ case StructureType::eImageBlit2KHR : return "ImageBlit2KHR";
+ case StructureType::eBufferImageCopy2KHR : return "BufferImageCopy2KHR";
+ case StructureType::eImageResolve2KHR : return "ImageResolve2KHR";
+ case StructureType::ePhysicalDevice4444FormatsFeaturesEXT : return "PhysicalDevice4444FormatsFeaturesEXT";
+ case StructureType::eDirectfbSurfaceCreateInfoEXT : return "DirectfbSurfaceCreateInfoEXT";
default: return "invalid";
}
}
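
The to_string overloads extended in this hunk give every StructureType enumerator a readable name. As a minimal sketch (assuming vulkan.hpp is on the include path and the default vk namespace is used), the helper can be used to log an sType value such as the newly added portability-subset structure:

#include <vulkan/vulkan.hpp>
#include <iostream>

int main()
{
    // prints "PhysicalDevicePortabilitySubsetFeaturesKHR", matching the case added above
    std::cout << vk::to_string( vk::StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR ) << '\n';
    return 0;
}
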
- enum class SubgroupFeatureFlagBits
+ enum class SubgroupFeatureFlagBits : VkSubgroupFeatureFlags
{
eBasic = VK_SUBGROUP_FEATURE_BASIC_BIT,
eVote = VK_SUBGROUP_FEATURE_VOTE_BIT,
@@ -8154,10 +9065,12 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class SubpassDescriptionFlagBits
+ enum class SubpassDescriptionFlagBits : VkSubpassDescriptionFlags
{
ePerViewAttributesNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX,
- ePerViewPositionXOnlyNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX
+ ePerViewPositionXOnlyNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX,
+ eFragmentRegionQCOM = VK_SUBPASS_DESCRIPTION_FRAGMENT_REGION_BIT_QCOM,
+ eShaderResolveQCOM = VK_SUBPASS_DESCRIPTION_SHADER_RESOLVE_BIT_QCOM
};
VULKAN_HPP_INLINE std::string to_string( SubpassDescriptionFlagBits value )
@@ -8166,11 +9079,13 @@ namespace VULKAN_HPP_NAMESPACE
{
case SubpassDescriptionFlagBits::ePerViewAttributesNVX : return "PerViewAttributesNVX";
case SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX : return "PerViewPositionXOnlyNVX";
+ case SubpassDescriptionFlagBits::eFragmentRegionQCOM : return "FragmentRegionQCOM";
+ case SubpassDescriptionFlagBits::eShaderResolveQCOM : return "ShaderResolveQCOM";
default: return "invalid";
}
}
- enum class SurfaceCounterFlagBitsEXT
+ enum class SurfaceCounterFlagBitsEXT : VkSurfaceCounterFlagsEXT
{
eVblank = VK_SURFACE_COUNTER_VBLANK_EXT
};
@@ -8184,7 +9099,7 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class SurfaceTransformFlagBitsKHR
+ enum class SurfaceTransformFlagBitsKHR : VkSurfaceTransformFlagsKHR
{
eIdentity = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR,
eRotate90 = VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR,
@@ -8214,7 +9129,7 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class SwapchainCreateFlagBitsKHR
+ enum class SwapchainCreateFlagBitsKHR : VkSwapchainCreateFlagsKHR
{
eSplitInstanceBindRegions = VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR,
eProtected = VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR,
@@ -8291,7 +9206,7 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class ToolPurposeFlagBitsEXT
+ enum class ToolPurposeFlagBitsEXT : VkToolPurposeFlagsEXT
{
eValidation = VK_TOOL_PURPOSE_VALIDATION_BIT_EXT,
eProfiling = VK_TOOL_PURPOSE_PROFILING_BIT_EXT,
@@ -8377,7 +9292,9 @@ namespace VULKAN_HPP_NAMESPACE
{
eGpuAssisted = VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT,
eGpuAssistedReserveBindingSlot = VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT,
- eBestPractices = VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT
+ eBestPractices = VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT,
+ eDebugPrintf = VK_VALIDATION_FEATURE_ENABLE_DEBUG_PRINTF_EXT,
+ eSynchronizationValidation = VK_VALIDATION_FEATURE_ENABLE_SYNCHRONIZATION_VALIDATION_EXT
};
VULKAN_HPP_INLINE std::string to_string( ValidationFeatureEnableEXT value )
@@ -8387,6 +9304,8 @@ namespace VULKAN_HPP_NAMESPACE
case ValidationFeatureEnableEXT::eGpuAssisted : return "GpuAssisted";
case ValidationFeatureEnableEXT::eGpuAssistedReserveBindingSlot : return "GpuAssistedReserveBindingSlot";
case ValidationFeatureEnableEXT::eBestPractices : return "BestPractices";
+ case ValidationFeatureEnableEXT::eDebugPrintf : return "DebugPrintf";
+ case ValidationFeatureEnableEXT::eSynchronizationValidation : return "SynchronizationValidation";
default: return "invalid";
}
}
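
ValidationFeatureEnableEXT gains eDebugPrintf and eSynchronizationValidation here. A minimal sketch of how such enable values are typically requested, assuming the VK_EXT_validation_features extension and the Khronos validation layer are available; the helper name is illustrative and error handling is omitted:

#include <vulkan/vulkan.h>

VkInstance createInstanceWithSyncValidation()
{
    const VkValidationFeatureEnableEXT enables[] = {
        VK_VALIDATION_FEATURE_ENABLE_SYNCHRONIZATION_VALIDATION_EXT,
        VK_VALIDATION_FEATURE_ENABLE_DEBUG_PRINTF_EXT,
    };

    VkValidationFeaturesEXT features = {};
    features.sType                         = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT;
    features.enabledValidationFeatureCount = 2;
    features.pEnabledValidationFeatures    = enables;

    VkInstanceCreateInfo ci = {};
    ci.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
    ci.pNext = &features;   // chain the validation-feature request into instance creation

    VkInstance instance = VK_NULL_HANDLE;
    vkCreateInstance( &ci, nullptr, &instance );
    return instance;
}
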
@@ -8395,7 +9314,9 @@ namespace VULKAN_HPP_NAMESPACE
{
eVIV = VK_VENDOR_ID_VIV,
eVSI = VK_VENDOR_ID_VSI,
- eKazan = VK_VENDOR_ID_KAZAN
+ eKazan = VK_VENDOR_ID_KAZAN,
+ eCodeplay = VK_VENDOR_ID_CODEPLAY,
+ eMESA = VK_VENDOR_ID_MESA
};
VULKAN_HPP_INLINE std::string to_string( VendorId value )
@@ -8405,6 +9326,8 @@ namespace VULKAN_HPP_NAMESPACE
case VendorId::eVIV : return "VIV";
case VendorId::eVSI : return "VSI";
case VendorId::eKazan : return "Kazan";
+ case VendorId::eCodeplay : return "Codeplay";
+ case VendorId::eMESA : return "MESA";
default: return "invalid";
}
}
@@ -8453,18 +9376,54 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- template<ObjectType value>
- struct cpp_type
+ template<typename T>
+ struct IndexTypeValue
+ {};
+
+ template <>
+ struct IndexTypeValue<uint16_t>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR IndexType value = IndexType::eUint16;
};
- using AccessFlags = Flags<AccessFlagBits, VkAccessFlags>;
+ template <>
+ struct CppType<IndexType, IndexType::eUint16>
+ {
+ using Type = uint16_t;
+ };
+
+ template <>
+ struct IndexTypeValue<uint32_t>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR IndexType value = IndexType::eUint32;
+ };
+
+ template <>
+ struct CppType<IndexType, IndexType::eUint32>
+ {
+ using Type = uint32_t;
+ };
+
+ template <>
+ struct IndexTypeValue<uint8_t>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR IndexType value = IndexType::eUint8EXT;
+ };
+
+ template <>
+ struct CppType<IndexType, IndexType::eUint8EXT>
+ {
+ using Type = uint8_t;
+ };
+
+
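+
The new IndexTypeValue<T> trait (together with the matching CppType specializations) maps an index element type to its IndexType enumerator at compile time. A minimal sketch of what that enables, assuming vulkan.hpp with the default vk namespace; the bindIndices helper below is illustrative, not part of the header:

#include <vulkan/vulkan.hpp>

template <typename IndexT>
void bindIndices( vk::CommandBuffer cmd, vk::Buffer buffer, vk::DeviceSize offset )
{
    // IndexTypeValue<uint16_t>::value is IndexType::eUint16, uint32_t maps to eUint32,
    // and uint8_t maps to eUint8EXT, exactly as specialized above
    cmd.bindIndexBuffer( buffer, offset, vk::IndexTypeValue<IndexT>::value );
}

// usage: bindIndices<uint32_t>( cmd, indexBuffer, 0 );
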
+ using AccessFlags = Flags<AccessFlagBits>;
template <> struct FlagTraits<AccessFlagBits>
{
- enum
+ enum : VkFlags
{
- allFlags = VkFlags(AccessFlagBits::eIndirectCommandRead) | VkFlags(AccessFlagBits::eIndexRead) | VkFlags(AccessFlagBits::eVertexAttributeRead) | VkFlags(AccessFlagBits::eUniformRead) | VkFlags(AccessFlagBits::eInputAttachmentRead) | VkFlags(AccessFlagBits::eShaderRead) | VkFlags(AccessFlagBits::eShaderWrite) | VkFlags(AccessFlagBits::eColorAttachmentRead) | VkFlags(AccessFlagBits::eColorAttachmentWrite) | VkFlags(AccessFlagBits::eDepthStencilAttachmentRead) | VkFlags(AccessFlagBits::eDepthStencilAttachmentWrite) | VkFlags(AccessFlagBits::eTransferRead) | VkFlags(AccessFlagBits::eTransferWrite) | VkFlags(AccessFlagBits::eHostRead) | VkFlags(AccessFlagBits::eHostWrite) | VkFlags(AccessFlagBits::eMemoryRead) | VkFlags(AccessFlagBits::eMemoryWrite) | VkFlags(AccessFlagBits::eTransformFeedbackWriteEXT) | VkFlags(AccessFlagBits::eTransformFeedbackCounterReadEXT) | VkFlags(AccessFlagBits::eTransformFeedbackCounterWriteEXT) | VkFlags(AccessFlagBits::eConditionalRenderingReadEXT) | VkFlags(AccessFlagBits::eCommandProcessReadNVX) | VkFlags(AccessFlagBits::eCommandProcessWriteNVX) | VkFlags(AccessFlagBits::eColorAttachmentReadNoncoherentEXT) | VkFlags(AccessFlagBits::eShadingRateImageReadNV) | VkFlags(AccessFlagBits::eAccelerationStructureReadNV) | VkFlags(AccessFlagBits::eAccelerationStructureWriteNV) | VkFlags(AccessFlagBits::eFragmentDensityMapReadEXT)
+ allFlags = VkFlags(AccessFlagBits::eIndirectCommandRead) | VkFlags(AccessFlagBits::eIndexRead) | VkFlags(AccessFlagBits::eVertexAttributeRead) | VkFlags(AccessFlagBits::eUniformRead) | VkFlags(AccessFlagBits::eInputAttachmentRead) | VkFlags(AccessFlagBits::eShaderRead) | VkFlags(AccessFlagBits::eShaderWrite) | VkFlags(AccessFlagBits::eColorAttachmentRead) | VkFlags(AccessFlagBits::eColorAttachmentWrite) | VkFlags(AccessFlagBits::eDepthStencilAttachmentRead) | VkFlags(AccessFlagBits::eDepthStencilAttachmentWrite) | VkFlags(AccessFlagBits::eTransferRead) | VkFlags(AccessFlagBits::eTransferWrite) | VkFlags(AccessFlagBits::eHostRead) | VkFlags(AccessFlagBits::eHostWrite) | VkFlags(AccessFlagBits::eMemoryRead) | VkFlags(AccessFlagBits::eMemoryWrite) | VkFlags(AccessFlagBits::eTransformFeedbackWriteEXT) | VkFlags(AccessFlagBits::eTransformFeedbackCounterReadEXT) | VkFlags(AccessFlagBits::eTransformFeedbackCounterWriteEXT) | VkFlags(AccessFlagBits::eConditionalRenderingReadEXT) | VkFlags(AccessFlagBits::eColorAttachmentReadNoncoherentEXT) | VkFlags(AccessFlagBits::eAccelerationStructureReadKHR) | VkFlags(AccessFlagBits::eAccelerationStructureWriteKHR) | VkFlags(AccessFlagBits::eShadingRateImageReadNV) | VkFlags(AccessFlagBits::eFragmentDensityMapReadEXT) | VkFlags(AccessFlagBits::eCommandPreprocessReadNV) | VkFlags(AccessFlagBits::eCommandPreprocessWriteNV)
};
};
@@ -8490,6 +9449,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( AccessFlags value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -8514,25 +9474,27 @@ namespace VULKAN_HPP_NAMESPACE
if ( value & AccessFlagBits::eTransformFeedbackCounterReadEXT ) result += "TransformFeedbackCounterReadEXT | ";
if ( value & AccessFlagBits::eTransformFeedbackCounterWriteEXT ) result += "TransformFeedbackCounterWriteEXT | ";
if ( value & AccessFlagBits::eConditionalRenderingReadEXT ) result += "ConditionalRenderingReadEXT | ";
- if ( value & AccessFlagBits::eCommandProcessReadNVX ) result += "CommandProcessReadNVX | ";
- if ( value & AccessFlagBits::eCommandProcessWriteNVX ) result += "CommandProcessWriteNVX | ";
if ( value & AccessFlagBits::eColorAttachmentReadNoncoherentEXT ) result += "ColorAttachmentReadNoncoherentEXT | ";
+ if ( value & AccessFlagBits::eAccelerationStructureReadKHR ) result += "AccelerationStructureReadKHR | ";
+ if ( value & AccessFlagBits::eAccelerationStructureWriteKHR ) result += "AccelerationStructureWriteKHR | ";
if ( value & AccessFlagBits::eShadingRateImageReadNV ) result += "ShadingRateImageReadNV | ";
- if ( value & AccessFlagBits::eAccelerationStructureReadNV ) result += "AccelerationStructureReadNV | ";
- if ( value & AccessFlagBits::eAccelerationStructureWriteNV ) result += "AccelerationStructureWriteNV | ";
if ( value & AccessFlagBits::eFragmentDensityMapReadEXT ) result += "FragmentDensityMapReadEXT | ";
+ if ( value & AccessFlagBits::eCommandPreprocessReadNV ) result += "CommandPreprocessReadNV | ";
+ if ( value & AccessFlagBits::eCommandPreprocessWriteNV ) result += "CommandPreprocessWriteNV | ";
return "{ " + result.substr(0, result.size() - 3) + " }";
}
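
Throughout this hunk the Flags alias drops its second template argument and each FlagBits enum gains an explicit VkFlags-sized underlying type; FlagTraits<BitType>::allFlags is what the bitwise operators and the to_string overloads iterate over. A small sketch of the resulting usage, assuming vulkan.hpp with the default vk namespace:

#include <vulkan/vulkan.hpp>
#include <iostream>

int main()
{
    // individual bits combine into a type-safe AccessFlags value
    vk::AccessFlags access = vk::AccessFlagBits::eShaderRead
                           | vk::AccessFlagBits::eColorAttachmentWrite;

    // prints "{ ShaderRead | ColorAttachmentWrite }" via the overload defined above
    std::cout << vk::to_string( access ) << '\n';

    // operator~ masks against FlagTraits<AccessFlagBits>::allFlags, so only defined bits remain
    vk::AccessFlags everythingElse = ~access;
    (void)everythingElse;
    return 0;
}
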
- using AcquireProfilingLockFlagsKHR = Flags<AcquireProfilingLockFlagBitsKHR, VkAcquireProfilingLockFlagsKHR>;
+
+ using AcquireProfilingLockFlagsKHR = Flags<AcquireProfilingLockFlagBitsKHR>;
VULKAN_HPP_INLINE std::string to_string( AcquireProfilingLockFlagsKHR )
{
+
return "{}";
}
#ifdef VK_USE_PLATFORM_ANDROID_KHR
- enum class AndroidSurfaceCreateFlagBitsKHR
+ enum class AndroidSurfaceCreateFlagBitsKHR : VkFlags
{};
VULKAN_HPP_INLINE std::string to_string( AndroidSurfaceCreateFlagBitsKHR )
@@ -8540,19 +9502,21 @@ namespace VULKAN_HPP_NAMESPACE
return "(void)";
}
- using AndroidSurfaceCreateFlagsKHR = Flags<AndroidSurfaceCreateFlagBitsKHR, VkAndroidSurfaceCreateFlagsKHR>;
+ using AndroidSurfaceCreateFlagsKHR = Flags<AndroidSurfaceCreateFlagBitsKHR>;
VULKAN_HPP_INLINE std::string to_string( AndroidSurfaceCreateFlagsKHR )
{
+
return "{}";
}
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
- using AttachmentDescriptionFlags = Flags<AttachmentDescriptionFlagBits, VkAttachmentDescriptionFlags>;
+
+ using AttachmentDescriptionFlags = Flags<AttachmentDescriptionFlagBits>;
template <> struct FlagTraits<AttachmentDescriptionFlagBits>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(AttachmentDescriptionFlagBits::eMayAlias)
};
@@ -8580,6 +9544,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( AttachmentDescriptionFlags value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -8587,11 +9552,12 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using BufferCreateFlags = Flags<BufferCreateFlagBits, VkBufferCreateFlags>;
+
+ using BufferCreateFlags = Flags<BufferCreateFlagBits>;
template <> struct FlagTraits<BufferCreateFlagBits>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(BufferCreateFlagBits::eSparseBinding) | VkFlags(BufferCreateFlagBits::eSparseResidency) | VkFlags(BufferCreateFlagBits::eSparseAliased) | VkFlags(BufferCreateFlagBits::eProtected) | VkFlags(BufferCreateFlagBits::eDeviceAddressCaptureReplay)
};
@@ -8619,6 +9585,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( BufferCreateFlags value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -8630,13 +9597,14 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using BufferUsageFlags = Flags<BufferUsageFlagBits, VkBufferUsageFlags>;
+
+ using BufferUsageFlags = Flags<BufferUsageFlagBits>;
template <> struct FlagTraits<BufferUsageFlagBits>
{
- enum
+ enum : VkFlags
{
- allFlags = VkFlags(BufferUsageFlagBits::eTransferSrc) | VkFlags(BufferUsageFlagBits::eTransferDst) | VkFlags(BufferUsageFlagBits::eUniformTexelBuffer) | VkFlags(BufferUsageFlagBits::eStorageTexelBuffer) | VkFlags(BufferUsageFlagBits::eUniformBuffer) | VkFlags(BufferUsageFlagBits::eStorageBuffer) | VkFlags(BufferUsageFlagBits::eIndexBuffer) | VkFlags(BufferUsageFlagBits::eVertexBuffer) | VkFlags(BufferUsageFlagBits::eIndirectBuffer) | VkFlags(BufferUsageFlagBits::eShaderDeviceAddress) | VkFlags(BufferUsageFlagBits::eTransformFeedbackBufferEXT) | VkFlags(BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT) | VkFlags(BufferUsageFlagBits::eConditionalRenderingEXT) | VkFlags(BufferUsageFlagBits::eRayTracingNV)
+ allFlags = VkFlags(BufferUsageFlagBits::eTransferSrc) | VkFlags(BufferUsageFlagBits::eTransferDst) | VkFlags(BufferUsageFlagBits::eUniformTexelBuffer) | VkFlags(BufferUsageFlagBits::eStorageTexelBuffer) | VkFlags(BufferUsageFlagBits::eUniformBuffer) | VkFlags(BufferUsageFlagBits::eStorageBuffer) | VkFlags(BufferUsageFlagBits::eIndexBuffer) | VkFlags(BufferUsageFlagBits::eVertexBuffer) | VkFlags(BufferUsageFlagBits::eIndirectBuffer) | VkFlags(BufferUsageFlagBits::eShaderDeviceAddress) | VkFlags(BufferUsageFlagBits::eTransformFeedbackBufferEXT) | VkFlags(BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT) | VkFlags(BufferUsageFlagBits::eConditionalRenderingEXT) | VkFlags(BufferUsageFlagBits::eRayTracingKHR)
};
};
@@ -8662,6 +9630,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( BufferUsageFlags value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -8678,65 +9647,79 @@ namespace VULKAN_HPP_NAMESPACE
if ( value & BufferUsageFlagBits::eTransformFeedbackBufferEXT ) result += "TransformFeedbackBufferEXT | ";
if ( value & BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT ) result += "TransformFeedbackCounterBufferEXT | ";
if ( value & BufferUsageFlagBits::eConditionalRenderingEXT ) result += "ConditionalRenderingEXT | ";
- if ( value & BufferUsageFlagBits::eRayTracingNV ) result += "RayTracingNV | ";
+ if ( value & BufferUsageFlagBits::eRayTracingKHR ) result += "RayTracingKHR | ";
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using BufferViewCreateFlags = Flags<BufferViewCreateFlagBits, VkBufferViewCreateFlags>;
+ enum class BufferViewCreateFlagBits : VkFlags
+ {};
+
+ VULKAN_HPP_INLINE std::string to_string( BufferViewCreateFlagBits )
+ {
+ return "(void)";
+ }
+
+ using BufferViewCreateFlags = Flags<BufferViewCreateFlagBits>;
VULKAN_HPP_INLINE std::string to_string( BufferViewCreateFlags )
{
+
return "{}";
}
- using BuildAccelerationStructureFlagsNV = Flags<BuildAccelerationStructureFlagBitsNV, VkBuildAccelerationStructureFlagsNV>;
- template <> struct FlagTraits<BuildAccelerationStructureFlagBitsNV>
+ using BuildAccelerationStructureFlagsKHR = Flags<BuildAccelerationStructureFlagBitsKHR>;
+
+ template <> struct FlagTraits<BuildAccelerationStructureFlagBitsKHR>
{
- enum
+ enum : VkFlags
{
- allFlags = VkFlags(BuildAccelerationStructureFlagBitsNV::eAllowUpdate) | VkFlags(BuildAccelerationStructureFlagBitsNV::eAllowCompaction) | VkFlags(BuildAccelerationStructureFlagBitsNV::ePreferFastTrace) | VkFlags(BuildAccelerationStructureFlagBitsNV::ePreferFastBuild) | VkFlags(BuildAccelerationStructureFlagBitsNV::eLowMemory)
+ allFlags = VkFlags(BuildAccelerationStructureFlagBitsKHR::eAllowUpdate) | VkFlags(BuildAccelerationStructureFlagBitsKHR::eAllowCompaction) | VkFlags(BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace) | VkFlags(BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild) | VkFlags(BuildAccelerationStructureFlagBitsKHR::eLowMemory)
};
};
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsNV operator|( BuildAccelerationStructureFlagBitsNV bit0, BuildAccelerationStructureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR operator|( BuildAccelerationStructureFlagBitsKHR bit0, BuildAccelerationStructureFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
{
- return BuildAccelerationStructureFlagsNV( bit0 ) | bit1;
+ return BuildAccelerationStructureFlagsKHR( bit0 ) | bit1;
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsNV operator&( BuildAccelerationStructureFlagBitsNV bit0, BuildAccelerationStructureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR operator&( BuildAccelerationStructureFlagBitsKHR bit0, BuildAccelerationStructureFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
{
- return BuildAccelerationStructureFlagsNV( bit0 ) & bit1;
+ return BuildAccelerationStructureFlagsKHR( bit0 ) & bit1;
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsNV operator^( BuildAccelerationStructureFlagBitsNV bit0, BuildAccelerationStructureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR operator^( BuildAccelerationStructureFlagBitsKHR bit0, BuildAccelerationStructureFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
{
- return BuildAccelerationStructureFlagsNV( bit0 ) ^ bit1;
+ return BuildAccelerationStructureFlagsKHR( bit0 ) ^ bit1;
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsNV operator~( BuildAccelerationStructureFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR operator~( BuildAccelerationStructureFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
{
- return ~( BuildAccelerationStructureFlagsNV( bits ) );
+ return ~( BuildAccelerationStructureFlagsKHR( bits ) );
}
- VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureFlagsNV value )
+ using BuildAccelerationStructureFlagsNV = BuildAccelerationStructureFlagsKHR;
+
+ VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureFlagsKHR value )
{
+
if ( !value ) return "{}";
std::string result;
- if ( value & BuildAccelerationStructureFlagBitsNV::eAllowUpdate ) result += "AllowUpdate | ";
- if ( value & BuildAccelerationStructureFlagBitsNV::eAllowCompaction ) result += "AllowCompaction | ";
- if ( value & BuildAccelerationStructureFlagBitsNV::ePreferFastTrace ) result += "PreferFastTrace | ";
- if ( value & BuildAccelerationStructureFlagBitsNV::ePreferFastBuild ) result += "PreferFastBuild | ";
- if ( value & BuildAccelerationStructureFlagBitsNV::eLowMemory ) result += "LowMemory | ";
+ if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowUpdate ) result += "AllowUpdate | ";
+ if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowCompaction ) result += "AllowCompaction | ";
+ if ( value & BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace ) result += "PreferFastTrace | ";
+ if ( value & BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild ) result += "PreferFastBuild | ";
+ if ( value & BuildAccelerationStructureFlagBitsKHR::eLowMemory ) result += "LowMemory | ";
return "{ " + result.substr(0, result.size() - 3) + " }";
}
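  // [Editorial note: illustrative sketch only, not part of the upstream patch.]
  // The generated FlagTraits specialization and constexpr operators above are what make
  // the following usage compile; the NV spelling survives as an alias of the KHR type.
  // A minimal example, assuming the header is included as <vulkan/vulkan.hpp> with the
  // default "vk" namespace:
  //
  //   #include <iostream>
  //   #include <vulkan/vulkan.hpp>
  //
  //   int main()
  //   {
  //     vk::BuildAccelerationStructureFlagsNV flags =            // NV alias of the KHR flags type
  //         vk::BuildAccelerationStructureFlagBitsKHR::eAllowUpdate |
  //         vk::BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild;
  //     std::cout << vk::to_string( flags ) << "\n";             // e.g. "{ AllowUpdate | PreferFastBuild }"
  //   }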
- using ColorComponentFlags = Flags<ColorComponentFlagBits, VkColorComponentFlags>;
+
+ using ColorComponentFlags = Flags<ColorComponentFlagBits>;
template <> struct FlagTraits<ColorComponentFlagBits>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(ColorComponentFlagBits::eR) | VkFlags(ColorComponentFlagBits::eG) | VkFlags(ColorComponentFlagBits::eB) | VkFlags(ColorComponentFlagBits::eA)
};
@@ -8764,6 +9747,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( ColorComponentFlags value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -8774,11 +9758,12 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using CommandBufferResetFlags = Flags<CommandBufferResetFlagBits, VkCommandBufferResetFlags>;
+
+ using CommandBufferResetFlags = Flags<CommandBufferResetFlagBits>;
template <> struct FlagTraits<CommandBufferResetFlagBits>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(CommandBufferResetFlagBits::eReleaseResources)
};
@@ -8806,6 +9791,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( CommandBufferResetFlags value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -8813,11 +9799,12 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using CommandBufferUsageFlags = Flags<CommandBufferUsageFlagBits, VkCommandBufferUsageFlags>;
+
+ using CommandBufferUsageFlags = Flags<CommandBufferUsageFlagBits>;
template <> struct FlagTraits<CommandBufferUsageFlagBits>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(CommandBufferUsageFlagBits::eOneTimeSubmit) | VkFlags(CommandBufferUsageFlagBits::eRenderPassContinue) | VkFlags(CommandBufferUsageFlagBits::eSimultaneousUse)
};
@@ -8845,6 +9832,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( CommandBufferUsageFlags value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -8854,11 +9842,12 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using CommandPoolCreateFlags = Flags<CommandPoolCreateFlagBits, VkCommandPoolCreateFlags>;
+
+ using CommandPoolCreateFlags = Flags<CommandPoolCreateFlagBits>;
template <> struct FlagTraits<CommandPoolCreateFlagBits>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(CommandPoolCreateFlagBits::eTransient) | VkFlags(CommandPoolCreateFlagBits::eResetCommandBuffer) | VkFlags(CommandPoolCreateFlagBits::eProtected)
};
@@ -8886,6 +9875,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( CommandPoolCreateFlags value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -8895,11 +9885,12 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using CommandPoolResetFlags = Flags<CommandPoolResetFlagBits, VkCommandPoolResetFlags>;
+
+ using CommandPoolResetFlags = Flags<CommandPoolResetFlagBits>;
template <> struct FlagTraits<CommandPoolResetFlagBits>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(CommandPoolResetFlagBits::eReleaseResources)
};
@@ -8927,6 +9918,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( CommandPoolResetFlags value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -8934,7 +9926,7 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- enum class CommandPoolTrimFlagBits
+ enum class CommandPoolTrimFlagBits : VkFlags
{};
VULKAN_HPP_INLINE std::string to_string( CommandPoolTrimFlagBits )
@@ -8942,20 +9934,22 @@ namespace VULKAN_HPP_NAMESPACE
return "(void)";
}
- using CommandPoolTrimFlags = Flags<CommandPoolTrimFlagBits, VkCommandPoolTrimFlags>;
+ using CommandPoolTrimFlags = Flags<CommandPoolTrimFlagBits>;
using CommandPoolTrimFlagsKHR = CommandPoolTrimFlags;
VULKAN_HPP_INLINE std::string to_string( CommandPoolTrimFlags )
{
+
return "{}";
}
- using CompositeAlphaFlagsKHR = Flags<CompositeAlphaFlagBitsKHR, VkCompositeAlphaFlagsKHR>;
+
+ using CompositeAlphaFlagsKHR = Flags<CompositeAlphaFlagBitsKHR>;
template <> struct FlagTraits<CompositeAlphaFlagBitsKHR>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(CompositeAlphaFlagBitsKHR::eOpaque) | VkFlags(CompositeAlphaFlagBitsKHR::ePreMultiplied) | VkFlags(CompositeAlphaFlagBitsKHR::ePostMultiplied) | VkFlags(CompositeAlphaFlagBitsKHR::eInherit)
};
@@ -8983,6 +9977,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( CompositeAlphaFlagsKHR value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -8993,11 +9988,12 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using ConditionalRenderingFlagsEXT = Flags<ConditionalRenderingFlagBitsEXT, VkConditionalRenderingFlagsEXT>;
+
+ using ConditionalRenderingFlagsEXT = Flags<ConditionalRenderingFlagBitsEXT>;
template <> struct FlagTraits<ConditionalRenderingFlagBitsEXT>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(ConditionalRenderingFlagBitsEXT::eInverted)
};
@@ -9025,6 +10021,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( ConditionalRenderingFlagsEXT value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -9032,11 +10029,12 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using CullModeFlags = Flags<CullModeFlagBits, VkCullModeFlags>;
+
+ using CullModeFlags = Flags<CullModeFlagBits>;
template <> struct FlagTraits<CullModeFlagBits>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(CullModeFlagBits::eNone) | VkFlags(CullModeFlagBits::eFront) | VkFlags(CullModeFlagBits::eBack) | VkFlags(CullModeFlagBits::eFrontAndBack)
};
@@ -9064,6 +10062,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( CullModeFlags value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -9072,11 +10071,12 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using DebugReportFlagsEXT = Flags<DebugReportFlagBitsEXT, VkDebugReportFlagsEXT>;
+
+ using DebugReportFlagsEXT = Flags<DebugReportFlagBitsEXT>;
template <> struct FlagTraits<DebugReportFlagBitsEXT>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(DebugReportFlagBitsEXT::eInformation) | VkFlags(DebugReportFlagBitsEXT::eWarning) | VkFlags(DebugReportFlagBitsEXT::ePerformanceWarning) | VkFlags(DebugReportFlagBitsEXT::eError) | VkFlags(DebugReportFlagBitsEXT::eDebug)
};
@@ -9104,6 +10104,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( DebugReportFlagsEXT value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -9115,11 +10116,12 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using DebugUtilsMessageSeverityFlagsEXT = Flags<DebugUtilsMessageSeverityFlagBitsEXT, VkDebugUtilsMessageSeverityFlagsEXT>;
+
+ using DebugUtilsMessageSeverityFlagsEXT = Flags<DebugUtilsMessageSeverityFlagBitsEXT>;
template <> struct FlagTraits<DebugUtilsMessageSeverityFlagBitsEXT>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eVerbose) | VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eInfo) | VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eWarning) | VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eError)
};
@@ -9147,6 +10149,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageSeverityFlagsEXT value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -9157,11 +10160,12 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using DebugUtilsMessageTypeFlagsEXT = Flags<DebugUtilsMessageTypeFlagBitsEXT, VkDebugUtilsMessageTypeFlagsEXT>;
+
+ using DebugUtilsMessageTypeFlagsEXT = Flags<DebugUtilsMessageTypeFlagBitsEXT>;
template <> struct FlagTraits<DebugUtilsMessageTypeFlagBitsEXT>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(DebugUtilsMessageTypeFlagBitsEXT::eGeneral) | VkFlags(DebugUtilsMessageTypeFlagBitsEXT::eValidation) | VkFlags(DebugUtilsMessageTypeFlagBitsEXT::ePerformance)
};
@@ -9189,6 +10193,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageTypeFlagsEXT value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -9198,7 +10203,7 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- enum class DebugUtilsMessengerCallbackDataFlagBitsEXT
+ enum class DebugUtilsMessengerCallbackDataFlagBitsEXT : VkFlags
{};
VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCallbackDataFlagBitsEXT )
@@ -9206,14 +10211,15 @@ namespace VULKAN_HPP_NAMESPACE
return "(void)";
}
- using DebugUtilsMessengerCallbackDataFlagsEXT = Flags<DebugUtilsMessengerCallbackDataFlagBitsEXT, VkDebugUtilsMessengerCallbackDataFlagsEXT>;
+ using DebugUtilsMessengerCallbackDataFlagsEXT = Flags<DebugUtilsMessengerCallbackDataFlagBitsEXT>;
VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCallbackDataFlagsEXT )
{
+
return "{}";
}
- enum class DebugUtilsMessengerCreateFlagBitsEXT
+ enum class DebugUtilsMessengerCreateFlagBitsEXT : VkFlags
{};
VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCreateFlagBitsEXT )
@@ -9221,18 +10227,20 @@ namespace VULKAN_HPP_NAMESPACE
return "(void)";
}
- using DebugUtilsMessengerCreateFlagsEXT = Flags<DebugUtilsMessengerCreateFlagBitsEXT, VkDebugUtilsMessengerCreateFlagsEXT>;
+ using DebugUtilsMessengerCreateFlagsEXT = Flags<DebugUtilsMessengerCreateFlagBitsEXT>;
VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCreateFlagsEXT )
{
+
return "{}";
}
- using DependencyFlags = Flags<DependencyFlagBits, VkDependencyFlags>;
+
+ using DependencyFlags = Flags<DependencyFlagBits>;
template <> struct FlagTraits<DependencyFlagBits>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(DependencyFlagBits::eByRegion) | VkFlags(DependencyFlagBits::eDeviceGroup) | VkFlags(DependencyFlagBits::eViewLocal)
};
@@ -9260,6 +10268,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( DependencyFlags value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -9269,11 +10278,12 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using DescriptorBindingFlags = Flags<DescriptorBindingFlagBits, VkDescriptorBindingFlags>;
+
+ using DescriptorBindingFlags = Flags<DescriptorBindingFlagBits>;
template <> struct FlagTraits<DescriptorBindingFlagBits>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(DescriptorBindingFlagBits::eUpdateAfterBind) | VkFlags(DescriptorBindingFlagBits::eUpdateUnusedWhilePending) | VkFlags(DescriptorBindingFlagBits::ePartiallyBound) | VkFlags(DescriptorBindingFlagBits::eVariableDescriptorCount)
};
@@ -9303,6 +10313,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( DescriptorBindingFlags value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -9313,11 +10324,12 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using DescriptorPoolCreateFlags = Flags<DescriptorPoolCreateFlagBits, VkDescriptorPoolCreateFlags>;
+
+ using DescriptorPoolCreateFlags = Flags<DescriptorPoolCreateFlagBits>;
template <> struct FlagTraits<DescriptorPoolCreateFlagBits>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(DescriptorPoolCreateFlagBits::eFreeDescriptorSet) | VkFlags(DescriptorPoolCreateFlagBits::eUpdateAfterBind)
};
@@ -9345,6 +10357,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( DescriptorPoolCreateFlags value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -9353,7 +10366,7 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- enum class DescriptorPoolResetFlagBits
+ enum class DescriptorPoolResetFlagBits : VkFlags
{};
VULKAN_HPP_INLINE std::string to_string( DescriptorPoolResetFlagBits )
@@ -9361,18 +10374,20 @@ namespace VULKAN_HPP_NAMESPACE
return "(void)";
}
- using DescriptorPoolResetFlags = Flags<DescriptorPoolResetFlagBits, VkDescriptorPoolResetFlags>;
+ using DescriptorPoolResetFlags = Flags<DescriptorPoolResetFlagBits>;
VULKAN_HPP_INLINE std::string to_string( DescriptorPoolResetFlags )
{
+
return "{}";
}
- using DescriptorSetLayoutCreateFlags = Flags<DescriptorSetLayoutCreateFlagBits, VkDescriptorSetLayoutCreateFlags>;
+
+ using DescriptorSetLayoutCreateFlags = Flags<DescriptorSetLayoutCreateFlagBits>;
template <> struct FlagTraits<DescriptorSetLayoutCreateFlagBits>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool) | VkFlags(DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR)
};
@@ -9400,6 +10415,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( DescriptorSetLayoutCreateFlags value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -9408,7 +10424,7 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- enum class DescriptorUpdateTemplateCreateFlagBits
+ enum class DescriptorUpdateTemplateCreateFlagBits : VkFlags
{};
VULKAN_HPP_INLINE std::string to_string( DescriptorUpdateTemplateCreateFlagBits )
@@ -9416,27 +10432,74 @@ namespace VULKAN_HPP_NAMESPACE
return "(void)";
}
- using DescriptorUpdateTemplateCreateFlags = Flags<DescriptorUpdateTemplateCreateFlagBits, VkDescriptorUpdateTemplateCreateFlags>;
+ using DescriptorUpdateTemplateCreateFlags = Flags<DescriptorUpdateTemplateCreateFlagBits>;
using DescriptorUpdateTemplateCreateFlagsKHR = DescriptorUpdateTemplateCreateFlags;
VULKAN_HPP_INLINE std::string to_string( DescriptorUpdateTemplateCreateFlags )
{
+
return "{}";
}
- using DeviceCreateFlags = Flags<DeviceCreateFlagBits, VkDeviceCreateFlags>;
+
+ using DeviceCreateFlags = Flags<DeviceCreateFlagBits>;
VULKAN_HPP_INLINE std::string to_string( DeviceCreateFlags )
{
+
return "{}";
}
- using DeviceGroupPresentModeFlagsKHR = Flags<DeviceGroupPresentModeFlagBitsKHR, VkDeviceGroupPresentModeFlagsKHR>;
+
+ using DeviceDiagnosticsConfigFlagsNV = Flags<DeviceDiagnosticsConfigFlagBitsNV>;
+
+ template <> struct FlagTraits<DeviceDiagnosticsConfigFlagBitsNV>
+ {
+ enum : VkFlags
+ {
+ allFlags = VkFlags(DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo) | VkFlags(DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking) | VkFlags(DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints)
+ };
+ };
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV operator|( DeviceDiagnosticsConfigFlagBitsNV bit0, DeviceDiagnosticsConfigFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+ {
+ return DeviceDiagnosticsConfigFlagsNV( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV operator&( DeviceDiagnosticsConfigFlagBitsNV bit0, DeviceDiagnosticsConfigFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+ {
+ return DeviceDiagnosticsConfigFlagsNV( bit0 ) & bit1;
+ }
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV operator^( DeviceDiagnosticsConfigFlagBitsNV bit0, DeviceDiagnosticsConfigFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+ {
+ return DeviceDiagnosticsConfigFlagsNV( bit0 ) ^ bit1;
+ }
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV operator~( DeviceDiagnosticsConfigFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT
+ {
+ return ~( DeviceDiagnosticsConfigFlagsNV( bits ) );
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( DeviceDiagnosticsConfigFlagsNV value )
+ {
+
+ if ( !value ) return "{}";
+ std::string result;
+
+ if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo ) result += "EnableShaderDebugInfo | ";
+ if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking ) result += "EnableResourceTracking | ";
+ if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints ) result += "EnableAutomaticCheckpoints | ";
+ return "{ " + result.substr(0, result.size() - 3) + " }";
+ }
+
+
+ using DeviceGroupPresentModeFlagsKHR = Flags<DeviceGroupPresentModeFlagBitsKHR>;
template <> struct FlagTraits<DeviceGroupPresentModeFlagBitsKHR>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(DeviceGroupPresentModeFlagBitsKHR::eLocal) | VkFlags(DeviceGroupPresentModeFlagBitsKHR::eRemote) | VkFlags(DeviceGroupPresentModeFlagBitsKHR::eSum) | VkFlags(DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice)
};
@@ -9464,6 +10527,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( DeviceGroupPresentModeFlagsKHR value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -9474,11 +10538,12 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using DeviceQueueCreateFlags = Flags<DeviceQueueCreateFlagBits, VkDeviceQueueCreateFlags>;
+
+ using DeviceQueueCreateFlags = Flags<DeviceQueueCreateFlagBits>;
template <> struct FlagTraits<DeviceQueueCreateFlagBits>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(DeviceQueueCreateFlagBits::eProtected)
};
@@ -9506,6 +10571,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( DeviceQueueCreateFlags value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -9513,7 +10579,25 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- enum class DisplayModeCreateFlagBitsKHR
+#ifdef VK_USE_PLATFORM_DIRECTFB_EXT
+ enum class DirectFBSurfaceCreateFlagBitsEXT : VkFlags
+ {};
+
+ VULKAN_HPP_INLINE std::string to_string( DirectFBSurfaceCreateFlagBitsEXT )
+ {
+ return "(void)";
+ }
+
+ using DirectFBSurfaceCreateFlagsEXT = Flags<DirectFBSurfaceCreateFlagBitsEXT>;
+
+ VULKAN_HPP_INLINE std::string to_string( DirectFBSurfaceCreateFlagsEXT )
+ {
+
+ return "{}";
+ }
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
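  // [Editorial note: illustrative sketch only, not part of the upstream patch.]
  // Platform-specific flag types such as the DirectFB ones above only exist when the
  // corresponding platform macro is defined before the header is included:
  //
  //   #define VK_USE_PLATFORM_DIRECTFB_EXT
  //   #include <vulkan/vulkan.hpp>
  //
  //   vk::DirectFBSurfaceCreateFlagsEXT dfbFlags;   // flag type with no defined bits
  //   // vk::to_string( dfbFlags ) yields "{}", matching the to_string overload above.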
+
+ enum class DisplayModeCreateFlagBitsKHR : VkFlags
{};
VULKAN_HPP_INLINE std::string to_string( DisplayModeCreateFlagBitsKHR )
@@ -9521,18 +10605,20 @@ namespace VULKAN_HPP_NAMESPACE
return "(void)";
}
- using DisplayModeCreateFlagsKHR = Flags<DisplayModeCreateFlagBitsKHR, VkDisplayModeCreateFlagsKHR>;
+ using DisplayModeCreateFlagsKHR = Flags<DisplayModeCreateFlagBitsKHR>;
VULKAN_HPP_INLINE std::string to_string( DisplayModeCreateFlagsKHR )
{
+
return "{}";
}
- using DisplayPlaneAlphaFlagsKHR = Flags<DisplayPlaneAlphaFlagBitsKHR, VkDisplayPlaneAlphaFlagsKHR>;
+
+ using DisplayPlaneAlphaFlagsKHR = Flags<DisplayPlaneAlphaFlagBitsKHR>;
template <> struct FlagTraits<DisplayPlaneAlphaFlagBitsKHR>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(DisplayPlaneAlphaFlagBitsKHR::eOpaque) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::eGlobal) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::ePerPixel) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied)
};
@@ -9560,6 +10646,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( DisplayPlaneAlphaFlagsKHR value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -9570,7 +10657,7 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- enum class DisplaySurfaceCreateFlagBitsKHR
+ enum class DisplaySurfaceCreateFlagBitsKHR : VkFlags
{};
VULKAN_HPP_INLINE std::string to_string( DisplaySurfaceCreateFlagBitsKHR )
@@ -9578,14 +10665,15 @@ namespace VULKAN_HPP_NAMESPACE
return "(void)";
}
- using DisplaySurfaceCreateFlagsKHR = Flags<DisplaySurfaceCreateFlagBitsKHR, VkDisplaySurfaceCreateFlagsKHR>;
+ using DisplaySurfaceCreateFlagsKHR = Flags<DisplaySurfaceCreateFlagBitsKHR>;
VULKAN_HPP_INLINE std::string to_string( DisplaySurfaceCreateFlagsKHR )
{
+
return "{}";
}
- enum class EventCreateFlagBits
+ enum class EventCreateFlagBits : VkFlags
{};
VULKAN_HPP_INLINE std::string to_string( EventCreateFlagBits )
@@ -9593,18 +10681,20 @@ namespace VULKAN_HPP_NAMESPACE
return "(void)";
}
- using EventCreateFlags = Flags<EventCreateFlagBits, VkEventCreateFlags>;
+ using EventCreateFlags = Flags<EventCreateFlagBits>;
VULKAN_HPP_INLINE std::string to_string( EventCreateFlags )
{
+
return "{}";
}
- using ExternalFenceFeatureFlags = Flags<ExternalFenceFeatureFlagBits, VkExternalFenceFeatureFlags>;
+
+ using ExternalFenceFeatureFlags = Flags<ExternalFenceFeatureFlagBits>;
template <> struct FlagTraits<ExternalFenceFeatureFlagBits>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(ExternalFenceFeatureFlagBits::eExportable) | VkFlags(ExternalFenceFeatureFlagBits::eImportable)
};
@@ -9634,6 +10724,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( ExternalFenceFeatureFlags value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -9642,11 +10733,12 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using ExternalFenceHandleTypeFlags = Flags<ExternalFenceHandleTypeFlagBits, VkExternalFenceHandleTypeFlags>;
+
+ using ExternalFenceHandleTypeFlags = Flags<ExternalFenceHandleTypeFlagBits>;
template <> struct FlagTraits<ExternalFenceHandleTypeFlagBits>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(ExternalFenceHandleTypeFlagBits::eOpaqueFd) | VkFlags(ExternalFenceHandleTypeFlagBits::eOpaqueWin32) | VkFlags(ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt) | VkFlags(ExternalFenceHandleTypeFlagBits::eSyncFd)
};
@@ -9676,6 +10768,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( ExternalFenceHandleTypeFlags value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -9686,11 +10779,12 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using ExternalMemoryFeatureFlags = Flags<ExternalMemoryFeatureFlagBits, VkExternalMemoryFeatureFlags>;
+
+ using ExternalMemoryFeatureFlags = Flags<ExternalMemoryFeatureFlagBits>;
template <> struct FlagTraits<ExternalMemoryFeatureFlagBits>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(ExternalMemoryFeatureFlagBits::eDedicatedOnly) | VkFlags(ExternalMemoryFeatureFlagBits::eExportable) | VkFlags(ExternalMemoryFeatureFlagBits::eImportable)
};
@@ -9720,6 +10814,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlags value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -9729,11 +10824,12 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using ExternalMemoryFeatureFlagsNV = Flags<ExternalMemoryFeatureFlagBitsNV, VkExternalMemoryFeatureFlagsNV>;
+
+ using ExternalMemoryFeatureFlagsNV = Flags<ExternalMemoryFeatureFlagBitsNV>;
template <> struct FlagTraits<ExternalMemoryFeatureFlagBitsNV>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly) | VkFlags(ExternalMemoryFeatureFlagBitsNV::eExportable) | VkFlags(ExternalMemoryFeatureFlagBitsNV::eImportable)
};
@@ -9761,6 +10857,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagsNV value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -9770,11 +10867,12 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using ExternalMemoryHandleTypeFlags = Flags<ExternalMemoryHandleTypeFlagBits, VkExternalMemoryHandleTypeFlags>;
+
+ using ExternalMemoryHandleTypeFlags = Flags<ExternalMemoryHandleTypeFlagBits>;
template <> struct FlagTraits<ExternalMemoryHandleTypeFlagBits>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(ExternalMemoryHandleTypeFlagBits::eOpaqueFd) | VkFlags(ExternalMemoryHandleTypeFlagBits::eOpaqueWin32) | VkFlags(ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt) | VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D11Texture) | VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt) | VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D12Heap) | VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D12Resource) | VkFlags(ExternalMemoryHandleTypeFlagBits::eDmaBufEXT) | VkFlags(ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID) | VkFlags(ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT) | VkFlags(ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT)
};
@@ -9804,6 +10902,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlags value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -9821,11 +10920,12 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using ExternalMemoryHandleTypeFlagsNV = Flags<ExternalMemoryHandleTypeFlagBitsNV, VkExternalMemoryHandleTypeFlagsNV>;
+
+ using ExternalMemoryHandleTypeFlagsNV = Flags<ExternalMemoryHandleTypeFlagBitsNV>;
template <> struct FlagTraits<ExternalMemoryHandleTypeFlagBitsNV>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt)
};
@@ -9853,6 +10953,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagsNV value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -9863,11 +10964,12 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using ExternalSemaphoreFeatureFlags = Flags<ExternalSemaphoreFeatureFlagBits, VkExternalSemaphoreFeatureFlags>;
+
+ using ExternalSemaphoreFeatureFlags = Flags<ExternalSemaphoreFeatureFlagBits>;
template <> struct FlagTraits<ExternalSemaphoreFeatureFlagBits>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(ExternalSemaphoreFeatureFlagBits::eExportable) | VkFlags(ExternalSemaphoreFeatureFlagBits::eImportable)
};
@@ -9897,6 +10999,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreFeatureFlags value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -9905,11 +11008,12 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using ExternalSemaphoreHandleTypeFlags = Flags<ExternalSemaphoreHandleTypeFlagBits, VkExternalSemaphoreHandleTypeFlags>;
+
+ using ExternalSemaphoreHandleTypeFlags = Flags<ExternalSemaphoreHandleTypeFlagBits>;
template <> struct FlagTraits<ExternalSemaphoreHandleTypeFlagBits>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd) | VkFlags(ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32) | VkFlags(ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt) | VkFlags(ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence) | VkFlags(ExternalSemaphoreHandleTypeFlagBits::eSyncFd)
};
@@ -9939,6 +11043,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreHandleTypeFlags value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -9950,11 +11055,12 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using FenceCreateFlags = Flags<FenceCreateFlagBits, VkFenceCreateFlags>;
+
+ using FenceCreateFlags = Flags<FenceCreateFlagBits>;
template <> struct FlagTraits<FenceCreateFlagBits>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(FenceCreateFlagBits::eSignaled)
};
@@ -9982,6 +11088,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( FenceCreateFlags value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -9989,11 +11096,12 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using FenceImportFlags = Flags<FenceImportFlagBits, VkFenceImportFlags>;
+
+ using FenceImportFlags = Flags<FenceImportFlagBits>;
template <> struct FlagTraits<FenceImportFlagBits>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(FenceImportFlagBits::eTemporary)
};
@@ -10023,6 +11131,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( FenceImportFlags value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -10030,13 +11139,14 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using FormatFeatureFlags = Flags<FormatFeatureFlagBits, VkFormatFeatureFlags>;
+
+ using FormatFeatureFlags = Flags<FormatFeatureFlagBits>;
template <> struct FlagTraits<FormatFeatureFlagBits>
{
- enum
+ enum : VkFlags
{
- allFlags = VkFlags(FormatFeatureFlagBits::eSampledImage) | VkFlags(FormatFeatureFlagBits::eStorageImage) | VkFlags(FormatFeatureFlagBits::eStorageImageAtomic) | VkFlags(FormatFeatureFlagBits::eUniformTexelBuffer) | VkFlags(FormatFeatureFlagBits::eStorageTexelBuffer) | VkFlags(FormatFeatureFlagBits::eStorageTexelBufferAtomic) | VkFlags(FormatFeatureFlagBits::eVertexBuffer) | VkFlags(FormatFeatureFlagBits::eColorAttachment) | VkFlags(FormatFeatureFlagBits::eColorAttachmentBlend) | VkFlags(FormatFeatureFlagBits::eDepthStencilAttachment) | VkFlags(FormatFeatureFlagBits::eBlitSrc) | VkFlags(FormatFeatureFlagBits::eBlitDst) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterLinear) | VkFlags(FormatFeatureFlagBits::eTransferSrc) | VkFlags(FormatFeatureFlagBits::eTransferDst) | VkFlags(FormatFeatureFlagBits::eMidpointChromaSamples) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable) | VkFlags(FormatFeatureFlagBits::eDisjoint) | VkFlags(FormatFeatureFlagBits::eCositedChromaSamples) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterMinmax) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterCubicIMG) | VkFlags(FormatFeatureFlagBits::eFragmentDensityMapEXT)
+ allFlags = VkFlags(FormatFeatureFlagBits::eSampledImage) | VkFlags(FormatFeatureFlagBits::eStorageImage) | VkFlags(FormatFeatureFlagBits::eStorageImageAtomic) | VkFlags(FormatFeatureFlagBits::eUniformTexelBuffer) | VkFlags(FormatFeatureFlagBits::eStorageTexelBuffer) | VkFlags(FormatFeatureFlagBits::eStorageTexelBufferAtomic) | VkFlags(FormatFeatureFlagBits::eVertexBuffer) | VkFlags(FormatFeatureFlagBits::eColorAttachment) | VkFlags(FormatFeatureFlagBits::eColorAttachmentBlend) | VkFlags(FormatFeatureFlagBits::eDepthStencilAttachment) | VkFlags(FormatFeatureFlagBits::eBlitSrc) | VkFlags(FormatFeatureFlagBits::eBlitDst) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterLinear) | VkFlags(FormatFeatureFlagBits::eTransferSrc) | VkFlags(FormatFeatureFlagBits::eTransferDst) | VkFlags(FormatFeatureFlagBits::eMidpointChromaSamples) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable) | VkFlags(FormatFeatureFlagBits::eDisjoint) | VkFlags(FormatFeatureFlagBits::eCositedChromaSamples) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterMinmax) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterCubicIMG) | VkFlags(FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR) | VkFlags(FormatFeatureFlagBits::eFragmentDensityMapEXT)
};
};
@@ -10062,6 +11172,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( FormatFeatureFlags value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -10089,15 +11200,17 @@ namespace VULKAN_HPP_NAMESPACE
if ( value & FormatFeatureFlagBits::eCositedChromaSamples ) result += "CositedChromaSamples | ";
if ( value & FormatFeatureFlagBits::eSampledImageFilterMinmax ) result += "SampledImageFilterMinmax | ";
if ( value & FormatFeatureFlagBits::eSampledImageFilterCubicIMG ) result += "SampledImageFilterCubicIMG | ";
+ if ( value & FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR ) result += "AccelerationStructureVertexBufferKHR | ";
if ( value & FormatFeatureFlagBits::eFragmentDensityMapEXT ) result += "FragmentDensityMapEXT | ";
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using FramebufferCreateFlags = Flags<FramebufferCreateFlagBits, VkFramebufferCreateFlags>;
+
+ using FramebufferCreateFlags = Flags<FramebufferCreateFlagBits>;
template <> struct FlagTraits<FramebufferCreateFlagBits>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(FramebufferCreateFlagBits::eImageless)
};
@@ -10125,6 +11238,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( FramebufferCreateFlags value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -10132,89 +11246,97 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using GeometryFlagsNV = Flags<GeometryFlagBitsNV, VkGeometryFlagsNV>;
- template <> struct FlagTraits<GeometryFlagBitsNV>
+ using GeometryFlagsKHR = Flags<GeometryFlagBitsKHR>;
+
+ template <> struct FlagTraits<GeometryFlagBitsKHR>
{
- enum
+ enum : VkFlags
{
- allFlags = VkFlags(GeometryFlagBitsNV::eOpaque) | VkFlags(GeometryFlagBitsNV::eNoDuplicateAnyHitInvocation)
+ allFlags = VkFlags(GeometryFlagBitsKHR::eOpaque) | VkFlags(GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation)
};
};
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsNV operator|( GeometryFlagBitsNV bit0, GeometryFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator|( GeometryFlagBitsKHR bit0, GeometryFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
{
- return GeometryFlagsNV( bit0 ) | bit1;
+ return GeometryFlagsKHR( bit0 ) | bit1;
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsNV operator&( GeometryFlagBitsNV bit0, GeometryFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator&( GeometryFlagBitsKHR bit0, GeometryFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
{
- return GeometryFlagsNV( bit0 ) & bit1;
+ return GeometryFlagsKHR( bit0 ) & bit1;
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsNV operator^( GeometryFlagBitsNV bit0, GeometryFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator^( GeometryFlagBitsKHR bit0, GeometryFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
{
- return GeometryFlagsNV( bit0 ) ^ bit1;
+ return GeometryFlagsKHR( bit0 ) ^ bit1;
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsNV operator~( GeometryFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator~( GeometryFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
{
- return ~( GeometryFlagsNV( bits ) );
+ return ~( GeometryFlagsKHR( bits ) );
}
- VULKAN_HPP_INLINE std::string to_string( GeometryFlagsNV value )
+ using GeometryFlagsNV = GeometryFlagsKHR;
+
+ VULKAN_HPP_INLINE std::string to_string( GeometryFlagsKHR value )
{
+
if ( !value ) return "{}";
std::string result;
- if ( value & GeometryFlagBitsNV::eOpaque ) result += "Opaque | ";
- if ( value & GeometryFlagBitsNV::eNoDuplicateAnyHitInvocation ) result += "NoDuplicateAnyHitInvocation | ";
+ if ( value & GeometryFlagBitsKHR::eOpaque ) result += "Opaque | ";
+ if ( value & GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation ) result += "NoDuplicateAnyHitInvocation | ";
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using GeometryInstanceFlagsNV = Flags<GeometryInstanceFlagBitsNV, VkGeometryInstanceFlagsNV>;
- template <> struct FlagTraits<GeometryInstanceFlagBitsNV>
+ using GeometryInstanceFlagsKHR = Flags<GeometryInstanceFlagBitsKHR>;
+
+ template <> struct FlagTraits<GeometryInstanceFlagBitsKHR>
{
- enum
+ enum : VkFlags
{
- allFlags = VkFlags(GeometryInstanceFlagBitsNV::eTriangleCullDisable) | VkFlags(GeometryInstanceFlagBitsNV::eTriangleFrontCounterclockwise) | VkFlags(GeometryInstanceFlagBitsNV::eForceOpaque) | VkFlags(GeometryInstanceFlagBitsNV::eForceNoOpaque)
+ allFlags = VkFlags(GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable) | VkFlags(GeometryInstanceFlagBitsKHR::eTriangleFrontCounterclockwise) | VkFlags(GeometryInstanceFlagBitsKHR::eForceOpaque) | VkFlags(GeometryInstanceFlagBitsKHR::eForceNoOpaque)
};
};
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsNV operator|( GeometryInstanceFlagBitsNV bit0, GeometryInstanceFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR operator|( GeometryInstanceFlagBitsKHR bit0, GeometryInstanceFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
{
- return GeometryInstanceFlagsNV( bit0 ) | bit1;
+ return GeometryInstanceFlagsKHR( bit0 ) | bit1;
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsNV operator&( GeometryInstanceFlagBitsNV bit0, GeometryInstanceFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR operator&( GeometryInstanceFlagBitsKHR bit0, GeometryInstanceFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
{
- return GeometryInstanceFlagsNV( bit0 ) & bit1;
+ return GeometryInstanceFlagsKHR( bit0 ) & bit1;
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsNV operator^( GeometryInstanceFlagBitsNV bit0, GeometryInstanceFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR operator^( GeometryInstanceFlagBitsKHR bit0, GeometryInstanceFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
{
- return GeometryInstanceFlagsNV( bit0 ) ^ bit1;
+ return GeometryInstanceFlagsKHR( bit0 ) ^ bit1;
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsNV operator~( GeometryInstanceFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR operator~( GeometryInstanceFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
{
- return ~( GeometryInstanceFlagsNV( bits ) );
+ return ~( GeometryInstanceFlagsKHR( bits ) );
}
- VULKAN_HPP_INLINE std::string to_string( GeometryInstanceFlagsNV value )
+ using GeometryInstanceFlagsNV = GeometryInstanceFlagsKHR;
+
+ VULKAN_HPP_INLINE std::string to_string( GeometryInstanceFlagsKHR value )
{
+
if ( !value ) return "{}";
std::string result;
- if ( value & GeometryInstanceFlagBitsNV::eTriangleCullDisable ) result += "TriangleCullDisable | ";
- if ( value & GeometryInstanceFlagBitsNV::eTriangleFrontCounterclockwise ) result += "TriangleFrontCounterclockwise | ";
- if ( value & GeometryInstanceFlagBitsNV::eForceOpaque ) result += "ForceOpaque | ";
- if ( value & GeometryInstanceFlagBitsNV::eForceNoOpaque ) result += "ForceNoOpaque | ";
+ if ( value & GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable ) result += "TriangleFacingCullDisable | ";
+ if ( value & GeometryInstanceFlagBitsKHR::eTriangleFrontCounterclockwise ) result += "TriangleFrontCounterclockwise | ";
+ if ( value & GeometryInstanceFlagBitsKHR::eForceOpaque ) result += "ForceOpaque | ";
+ if ( value & GeometryInstanceFlagBitsKHR::eForceNoOpaque ) result += "ForceNoOpaque | ";
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- enum class HeadlessSurfaceCreateFlagBitsEXT
+ enum class HeadlessSurfaceCreateFlagBitsEXT : VkFlags
{};
VULKAN_HPP_INLINE std::string to_string( HeadlessSurfaceCreateFlagBitsEXT )
@@ -10222,15 +11344,16 @@ namespace VULKAN_HPP_NAMESPACE
return "(void)";
}
- using HeadlessSurfaceCreateFlagsEXT = Flags<HeadlessSurfaceCreateFlagBitsEXT, VkHeadlessSurfaceCreateFlagsEXT>;
+ using HeadlessSurfaceCreateFlagsEXT = Flags<HeadlessSurfaceCreateFlagBitsEXT>;
VULKAN_HPP_INLINE std::string to_string( HeadlessSurfaceCreateFlagsEXT )
{
+
return "{}";
}
#ifdef VK_USE_PLATFORM_IOS_MVK
- enum class IOSSurfaceCreateFlagBitsMVK
+ enum class IOSSurfaceCreateFlagBitsMVK : VkFlags
{};
VULKAN_HPP_INLINE std::string to_string( IOSSurfaceCreateFlagBitsMVK )
@@ -10238,19 +11361,21 @@ namespace VULKAN_HPP_NAMESPACE
return "(void)";
}
- using IOSSurfaceCreateFlagsMVK = Flags<IOSSurfaceCreateFlagBitsMVK, VkIOSSurfaceCreateFlagsMVK>;
+ using IOSSurfaceCreateFlagsMVK = Flags<IOSSurfaceCreateFlagBitsMVK>;
VULKAN_HPP_INLINE std::string to_string( IOSSurfaceCreateFlagsMVK )
{
+
return "{}";
}
#endif /*VK_USE_PLATFORM_IOS_MVK*/
- using ImageAspectFlags = Flags<ImageAspectFlagBits, VkImageAspectFlags>;
+
+ using ImageAspectFlags = Flags<ImageAspectFlagBits>;
template <> struct FlagTraits<ImageAspectFlagBits>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(ImageAspectFlagBits::eColor) | VkFlags(ImageAspectFlagBits::eDepth) | VkFlags(ImageAspectFlagBits::eStencil) | VkFlags(ImageAspectFlagBits::eMetadata) | VkFlags(ImageAspectFlagBits::ePlane0) | VkFlags(ImageAspectFlagBits::ePlane1) | VkFlags(ImageAspectFlagBits::ePlane2) | VkFlags(ImageAspectFlagBits::eMemoryPlane0EXT) | VkFlags(ImageAspectFlagBits::eMemoryPlane1EXT) | VkFlags(ImageAspectFlagBits::eMemoryPlane2EXT) | VkFlags(ImageAspectFlagBits::eMemoryPlane3EXT)
};
@@ -10278,6 +11403,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( ImageAspectFlags value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -10295,11 +11421,12 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using ImageCreateFlags = Flags<ImageCreateFlagBits, VkImageCreateFlags>;
+
+ using ImageCreateFlags = Flags<ImageCreateFlagBits>;
template <> struct FlagTraits<ImageCreateFlagBits>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(ImageCreateFlagBits::eSparseBinding) | VkFlags(ImageCreateFlagBits::eSparseResidency) | VkFlags(ImageCreateFlagBits::eSparseAliased) | VkFlags(ImageCreateFlagBits::eMutableFormat) | VkFlags(ImageCreateFlagBits::eCubeCompatible) | VkFlags(ImageCreateFlagBits::eAlias) | VkFlags(ImageCreateFlagBits::eSplitInstanceBindRegions) | VkFlags(ImageCreateFlagBits::e2DArrayCompatible) | VkFlags(ImageCreateFlagBits::eBlockTexelViewCompatible) | VkFlags(ImageCreateFlagBits::eExtendedUsage) | VkFlags(ImageCreateFlagBits::eProtected) | VkFlags(ImageCreateFlagBits::eDisjoint) | VkFlags(ImageCreateFlagBits::eCornerSampledNV) | VkFlags(ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT) | VkFlags(ImageCreateFlagBits::eSubsampledEXT)
};
@@ -10327,6 +11454,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( ImageCreateFlags value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -10349,7 +11477,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifdef VK_USE_PLATFORM_FUCHSIA
- enum class ImagePipeSurfaceCreateFlagBitsFUCHSIA
+ enum class ImagePipeSurfaceCreateFlagBitsFUCHSIA : VkFlags
{};
VULKAN_HPP_INLINE std::string to_string( ImagePipeSurfaceCreateFlagBitsFUCHSIA )
@@ -10357,19 +11485,21 @@ namespace VULKAN_HPP_NAMESPACE
return "(void)";
}
- using ImagePipeSurfaceCreateFlagsFUCHSIA = Flags<ImagePipeSurfaceCreateFlagBitsFUCHSIA, VkImagePipeSurfaceCreateFlagsFUCHSIA>;
+ using ImagePipeSurfaceCreateFlagsFUCHSIA = Flags<ImagePipeSurfaceCreateFlagBitsFUCHSIA>;
VULKAN_HPP_INLINE std::string to_string( ImagePipeSurfaceCreateFlagsFUCHSIA )
{
+
return "{}";
}
#endif /*VK_USE_PLATFORM_FUCHSIA*/
- using ImageUsageFlags = Flags<ImageUsageFlagBits, VkImageUsageFlags>;
+
+ using ImageUsageFlags = Flags<ImageUsageFlagBits>;
template <> struct FlagTraits<ImageUsageFlagBits>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(ImageUsageFlagBits::eTransferSrc) | VkFlags(ImageUsageFlagBits::eTransferDst) | VkFlags(ImageUsageFlagBits::eSampled) | VkFlags(ImageUsageFlagBits::eStorage) | VkFlags(ImageUsageFlagBits::eColorAttachment) | VkFlags(ImageUsageFlagBits::eDepthStencilAttachment) | VkFlags(ImageUsageFlagBits::eTransientAttachment) | VkFlags(ImageUsageFlagBits::eInputAttachment) | VkFlags(ImageUsageFlagBits::eShadingRateImageNV) | VkFlags(ImageUsageFlagBits::eFragmentDensityMapEXT)
};
@@ -10397,6 +11527,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( ImageUsageFlags value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -10413,13 +11544,14 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using ImageViewCreateFlags = Flags<ImageViewCreateFlagBits, VkImageViewCreateFlags>;
+
+ using ImageViewCreateFlags = Flags<ImageViewCreateFlagBits>;
template <> struct FlagTraits<ImageViewCreateFlagBits>
{
- enum
+ enum : VkFlags
{
- allFlags = VkFlags(ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT)
+ allFlags = VkFlags(ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT) | VkFlags(ImageViewCreateFlagBits::eFragmentDensityMapDeferredEXT)
};
};
@@ -10445,64 +11577,110 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( ImageViewCreateFlags value )
{
+
if ( !value ) return "{}";
std::string result;
if ( value & ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT ) result += "FragmentDensityMapDynamicEXT | ";
+ if ( value & ImageViewCreateFlagBits::eFragmentDensityMapDeferredEXT ) result += "FragmentDensityMapDeferredEXT | ";
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using IndirectCommandsLayoutUsageFlagsNVX = Flags<IndirectCommandsLayoutUsageFlagBitsNVX, VkIndirectCommandsLayoutUsageFlagsNVX>;
- template <> struct FlagTraits<IndirectCommandsLayoutUsageFlagBitsNVX>
+ using IndirectCommandsLayoutUsageFlagsNV = Flags<IndirectCommandsLayoutUsageFlagBitsNV>;
+
+ template <> struct FlagTraits<IndirectCommandsLayoutUsageFlagBitsNV>
{
- enum
+ enum : VkFlags
{
- allFlags = VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eSparseSequences) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eEmptyExecutions) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eIndexedSequences)
+ allFlags = VkFlags(IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences)
};
};
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNVX operator|( IndirectCommandsLayoutUsageFlagBitsNVX bit0, IndirectCommandsLayoutUsageFlagBitsNVX bit1 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV operator|( IndirectCommandsLayoutUsageFlagBitsNV bit0, IndirectCommandsLayoutUsageFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
{
- return IndirectCommandsLayoutUsageFlagsNVX( bit0 ) | bit1;
+ return IndirectCommandsLayoutUsageFlagsNV( bit0 ) | bit1;
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNVX operator&( IndirectCommandsLayoutUsageFlagBitsNVX bit0, IndirectCommandsLayoutUsageFlagBitsNVX bit1 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV operator&( IndirectCommandsLayoutUsageFlagBitsNV bit0, IndirectCommandsLayoutUsageFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
{
- return IndirectCommandsLayoutUsageFlagsNVX( bit0 ) & bit1;
+ return IndirectCommandsLayoutUsageFlagsNV( bit0 ) & bit1;
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNVX operator^( IndirectCommandsLayoutUsageFlagBitsNVX bit0, IndirectCommandsLayoutUsageFlagBitsNVX bit1 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV operator^( IndirectCommandsLayoutUsageFlagBitsNV bit0, IndirectCommandsLayoutUsageFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
{
- return IndirectCommandsLayoutUsageFlagsNVX( bit0 ) ^ bit1;
+ return IndirectCommandsLayoutUsageFlagsNV( bit0 ) ^ bit1;
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNVX operator~( IndirectCommandsLayoutUsageFlagBitsNVX bits ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV operator~( IndirectCommandsLayoutUsageFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT
{
- return ~( IndirectCommandsLayoutUsageFlagsNVX( bits ) );
+ return ~( IndirectCommandsLayoutUsageFlagsNV( bits ) );
}
- VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagsNVX value )
+ VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagsNV value )
{
+
if ( !value ) return "{}";
std::string result;
- if ( value & IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences ) result += "UnorderedSequences | ";
- if ( value & IndirectCommandsLayoutUsageFlagBitsNVX::eSparseSequences ) result += "SparseSequences | ";
- if ( value & IndirectCommandsLayoutUsageFlagBitsNVX::eEmptyExecutions ) result += "EmptyExecutions | ";
- if ( value & IndirectCommandsLayoutUsageFlagBitsNVX::eIndexedSequences ) result += "IndexedSequences | ";
+ if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess ) result += "ExplicitPreprocess | ";
+ if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences ) result += "IndexedSequences | ";
+ if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences ) result += "UnorderedSequences | ";
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using InstanceCreateFlags = Flags<InstanceCreateFlagBits, VkInstanceCreateFlags>;
+
+ using IndirectStateFlagsNV = Flags<IndirectStateFlagBitsNV>;
+
+ template <> struct FlagTraits<IndirectStateFlagBitsNV>
+ {
+ enum : VkFlags
+ {
+ allFlags = VkFlags(IndirectStateFlagBitsNV::eFlagFrontface)
+ };
+ };
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV operator|( IndirectStateFlagBitsNV bit0, IndirectStateFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+ {
+ return IndirectStateFlagsNV( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV operator&( IndirectStateFlagBitsNV bit0, IndirectStateFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+ {
+ return IndirectStateFlagsNV( bit0 ) & bit1;
+ }
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV operator^( IndirectStateFlagBitsNV bit0, IndirectStateFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+ {
+ return IndirectStateFlagsNV( bit0 ) ^ bit1;
+ }
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV operator~( IndirectStateFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT
+ {
+ return ~( IndirectStateFlagsNV( bits ) );
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( IndirectStateFlagsNV value )
+ {
+
+ if ( !value ) return "{}";
+ std::string result;
+
+ if ( value & IndirectStateFlagBitsNV::eFlagFrontface ) result += "FlagFrontface | ";
+ return "{ " + result.substr(0, result.size() - 3) + " }";
+ }
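  // [Editorial note: illustrative sketch only, not part of the upstream patch.]
  // FlagTraits<...>::allFlags is what lets operator~ produce a complement restricted to
  // the bits the API actually defines. For a single-bit type like IndirectStateFlagsNV,
  // complementing its only bit should therefore yield an empty mask:
  //
  //   vk::IndirectStateFlagsNV state( vk::IndirectStateFlagBitsNV::eFlagFrontface );
  //   vk::IndirectStateFlagsNV none = ~state;        // allFlags ^ eFlagFrontface == 0
  //   // !none is true, and vk::to_string( none ) yields "{}".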
+
+
+ using InstanceCreateFlags = Flags<InstanceCreateFlagBits>;
VULKAN_HPP_INLINE std::string to_string( InstanceCreateFlags )
{
+
return "{}";
}
#ifdef VK_USE_PLATFORM_MACOS_MVK
- enum class MacOSSurfaceCreateFlagBitsMVK
+ enum class MacOSSurfaceCreateFlagBitsMVK : VkFlags
{};
VULKAN_HPP_INLINE std::string to_string( MacOSSurfaceCreateFlagBitsMVK )
@@ -10510,19 +11688,21 @@ namespace VULKAN_HPP_NAMESPACE
return "(void)";
}
- using MacOSSurfaceCreateFlagsMVK = Flags<MacOSSurfaceCreateFlagBitsMVK, VkMacOSSurfaceCreateFlagsMVK>;
+ using MacOSSurfaceCreateFlagsMVK = Flags<MacOSSurfaceCreateFlagBitsMVK>;
VULKAN_HPP_INLINE std::string to_string( MacOSSurfaceCreateFlagsMVK )
{
+
return "{}";
}
#endif /*VK_USE_PLATFORM_MACOS_MVK*/
- using MemoryAllocateFlags = Flags<MemoryAllocateFlagBits, VkMemoryAllocateFlags>;
+
+ using MemoryAllocateFlags = Flags<MemoryAllocateFlagBits>;
template <> struct FlagTraits<MemoryAllocateFlagBits>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(MemoryAllocateFlagBits::eDeviceMask) | VkFlags(MemoryAllocateFlagBits::eDeviceAddress) | VkFlags(MemoryAllocateFlagBits::eDeviceAddressCaptureReplay)
};
@@ -10552,6 +11732,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( MemoryAllocateFlags value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -10561,11 +11742,12 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using MemoryHeapFlags = Flags<MemoryHeapFlagBits, VkMemoryHeapFlags>;
+
+ using MemoryHeapFlags = Flags<MemoryHeapFlagBits>;
template <> struct FlagTraits<MemoryHeapFlagBits>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(MemoryHeapFlagBits::eDeviceLocal) | VkFlags(MemoryHeapFlagBits::eMultiInstance)
};
@@ -10593,6 +11775,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( MemoryHeapFlags value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -10601,7 +11784,7 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- enum class MemoryMapFlagBits
+ enum class MemoryMapFlagBits : VkFlags
{};
VULKAN_HPP_INLINE std::string to_string( MemoryMapFlagBits )
@@ -10609,18 +11792,20 @@ namespace VULKAN_HPP_NAMESPACE
return "(void)";
}
- using MemoryMapFlags = Flags<MemoryMapFlagBits, VkMemoryMapFlags>;
+ using MemoryMapFlags = Flags<MemoryMapFlagBits>;
VULKAN_HPP_INLINE std::string to_string( MemoryMapFlags )
{
+
return "{}";
}
- using MemoryPropertyFlags = Flags<MemoryPropertyFlagBits, VkMemoryPropertyFlags>;
+
+ using MemoryPropertyFlags = Flags<MemoryPropertyFlagBits>;
template <> struct FlagTraits<MemoryPropertyFlagBits>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(MemoryPropertyFlagBits::eDeviceLocal) | VkFlags(MemoryPropertyFlagBits::eHostVisible) | VkFlags(MemoryPropertyFlagBits::eHostCoherent) | VkFlags(MemoryPropertyFlagBits::eHostCached) | VkFlags(MemoryPropertyFlagBits::eLazilyAllocated) | VkFlags(MemoryPropertyFlagBits::eProtected) | VkFlags(MemoryPropertyFlagBits::eDeviceCoherentAMD) | VkFlags(MemoryPropertyFlagBits::eDeviceUncachedAMD)
};
@@ -10648,6 +11833,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( MemoryPropertyFlags value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -10663,7 +11849,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifdef VK_USE_PLATFORM_METAL_EXT
- enum class MetalSurfaceCreateFlagBitsEXT
+ enum class MetalSurfaceCreateFlagBitsEXT : VkFlags
{};
VULKAN_HPP_INLINE std::string to_string( MetalSurfaceCreateFlagBitsEXT )
@@ -10671,59 +11857,21 @@ namespace VULKAN_HPP_NAMESPACE
return "(void)";
}
- using MetalSurfaceCreateFlagsEXT = Flags<MetalSurfaceCreateFlagBitsEXT, VkMetalSurfaceCreateFlagsEXT>;
+ using MetalSurfaceCreateFlagsEXT = Flags<MetalSurfaceCreateFlagBitsEXT>;
VULKAN_HPP_INLINE std::string to_string( MetalSurfaceCreateFlagsEXT )
{
+
return "{}";
}
#endif /*VK_USE_PLATFORM_METAL_EXT*/
- using ObjectEntryUsageFlagsNVX = Flags<ObjectEntryUsageFlagBitsNVX, VkObjectEntryUsageFlagsNVX>;
-
- template <> struct FlagTraits<ObjectEntryUsageFlagBitsNVX>
- {
- enum
- {
- allFlags = VkFlags(ObjectEntryUsageFlagBitsNVX::eGraphics) | VkFlags(ObjectEntryUsageFlagBitsNVX::eCompute)
- };
- };
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ObjectEntryUsageFlagsNVX operator|( ObjectEntryUsageFlagBitsNVX bit0, ObjectEntryUsageFlagBitsNVX bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ObjectEntryUsageFlagsNVX( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ObjectEntryUsageFlagsNVX operator&( ObjectEntryUsageFlagBitsNVX bit0, ObjectEntryUsageFlagBitsNVX bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ObjectEntryUsageFlagsNVX( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ObjectEntryUsageFlagsNVX operator^( ObjectEntryUsageFlagBitsNVX bit0, ObjectEntryUsageFlagBitsNVX bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ObjectEntryUsageFlagsNVX( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ObjectEntryUsageFlagsNVX operator~( ObjectEntryUsageFlagBitsNVX bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( ObjectEntryUsageFlagsNVX( bits ) );
- }
- VULKAN_HPP_INLINE std::string to_string( ObjectEntryUsageFlagsNVX value )
- {
- if ( !value ) return "{}";
- std::string result;
-
- if ( value & ObjectEntryUsageFlagBitsNVX::eGraphics ) result += "Graphics | ";
- if ( value & ObjectEntryUsageFlagBitsNVX::eCompute ) result += "Compute | ";
- return "{ " + result.substr(0, result.size() - 3) + " }";
- }
-
- using PeerMemoryFeatureFlags = Flags<PeerMemoryFeatureFlagBits, VkPeerMemoryFeatureFlags>;
+ using PeerMemoryFeatureFlags = Flags<PeerMemoryFeatureFlagBits>;
template <> struct FlagTraits<PeerMemoryFeatureFlagBits>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(PeerMemoryFeatureFlagBits::eCopySrc) | VkFlags(PeerMemoryFeatureFlagBits::eCopyDst) | VkFlags(PeerMemoryFeatureFlagBits::eGenericSrc) | VkFlags(PeerMemoryFeatureFlagBits::eGenericDst)
};
@@ -10753,6 +11901,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( PeerMemoryFeatureFlags value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -10763,11 +11912,12 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using PerformanceCounterDescriptionFlagsKHR = Flags<PerformanceCounterDescriptionFlagBitsKHR, VkPerformanceCounterDescriptionFlagsKHR>;
+
+ using PerformanceCounterDescriptionFlagsKHR = Flags<PerformanceCounterDescriptionFlagBitsKHR>;
template <> struct FlagTraits<PerformanceCounterDescriptionFlagBitsKHR>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting) | VkFlags(PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted)
};
@@ -10795,6 +11945,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( PerformanceCounterDescriptionFlagsKHR value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -10803,28 +11954,73 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using PipelineCacheCreateFlags = Flags<PipelineCacheCreateFlagBits, VkPipelineCacheCreateFlags>;
- VULKAN_HPP_INLINE std::string to_string( PipelineCacheCreateFlags )
+ using PipelineCacheCreateFlags = Flags<PipelineCacheCreateFlagBits>;
+
+ template <> struct FlagTraits<PipelineCacheCreateFlagBits>
{
- return "{}";
+ enum : VkFlags
+ {
+ allFlags = VkFlags(PipelineCacheCreateFlagBits::eExternallySynchronizedEXT)
+ };
+ };
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags operator|( PipelineCacheCreateFlagBits bit0, PipelineCacheCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+ {
+ return PipelineCacheCreateFlags( bit0 ) | bit1;
}
- using PipelineColorBlendStateCreateFlags = Flags<PipelineColorBlendStateCreateFlagBits, VkPipelineColorBlendStateCreateFlags>;
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags operator&( PipelineCacheCreateFlagBits bit0, PipelineCacheCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+ {
+ return PipelineCacheCreateFlags( bit0 ) & bit1;
+ }
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags operator^( PipelineCacheCreateFlagBits bit0, PipelineCacheCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+ {
+ return PipelineCacheCreateFlags( bit0 ) ^ bit1;
+ }
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags operator~( PipelineCacheCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
+ {
+ return ~( PipelineCacheCreateFlags( bits ) );
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineCacheCreateFlags value )
+ {
+
+ if ( !value ) return "{}";
+ std::string result;
+
+ if ( value & PipelineCacheCreateFlagBits::eExternallySynchronizedEXT ) result += "ExternallySynchronizedEXT | ";
+ return "{ " + result.substr(0, result.size() - 3) + " }";
+ }
+
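A minimal usage sketch for the PipelineCacheCreateFlags bitmask support introduced above, assuming the default VULKAN_HPP_NAMESPACE of vk and a standalone translation unit:

  #include <vulkan/vulkan.hpp>
  #include <iostream>

  int main()
  {
    // A single bit converts to the Flags type via its constructor; several bits can be
    // combined with the operator| overload defined above for PipelineCacheCreateFlagBits.
    vk::PipelineCacheCreateFlags cacheFlags = vk::PipelineCacheCreateFlagBits::eExternallySynchronizedEXT;
    std::cout << vk::to_string( cacheFlags ) << std::endl;  // prints "{ ExternallySynchronizedEXT }"
    return 0;
  }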
+ enum class PipelineColorBlendStateCreateFlagBits : VkFlags
+ {};
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineColorBlendStateCreateFlagBits )
+ {
+ return "(void)";
+ }
+
+ using PipelineColorBlendStateCreateFlags = Flags<PipelineColorBlendStateCreateFlagBits>;
VULKAN_HPP_INLINE std::string to_string( PipelineColorBlendStateCreateFlags )
{
+
return "{}";
}
- using PipelineCompilerControlFlagsAMD = Flags<PipelineCompilerControlFlagBitsAMD, VkPipelineCompilerControlFlagsAMD>;
+
+ using PipelineCompilerControlFlagsAMD = Flags<PipelineCompilerControlFlagBitsAMD>;
VULKAN_HPP_INLINE std::string to_string( PipelineCompilerControlFlagsAMD )
{
+
return "{}";
}
- enum class PipelineCoverageModulationStateCreateFlagBitsNV
+ enum class PipelineCoverageModulationStateCreateFlagBitsNV : VkFlags
{};
VULKAN_HPP_INLINE std::string to_string( PipelineCoverageModulationStateCreateFlagBitsNV )
@@ -10832,14 +12028,15 @@ namespace VULKAN_HPP_NAMESPACE
return "(void)";
}
- using PipelineCoverageModulationStateCreateFlagsNV = Flags<PipelineCoverageModulationStateCreateFlagBitsNV, VkPipelineCoverageModulationStateCreateFlagsNV>;
+ using PipelineCoverageModulationStateCreateFlagsNV = Flags<PipelineCoverageModulationStateCreateFlagBitsNV>;
VULKAN_HPP_INLINE std::string to_string( PipelineCoverageModulationStateCreateFlagsNV )
{
+
return "{}";
}
- enum class PipelineCoverageReductionStateCreateFlagBitsNV
+ enum class PipelineCoverageReductionStateCreateFlagBitsNV : VkFlags
{};
VULKAN_HPP_INLINE std::string to_string( PipelineCoverageReductionStateCreateFlagBitsNV )
@@ -10847,14 +12044,15 @@ namespace VULKAN_HPP_NAMESPACE
return "(void)";
}
- using PipelineCoverageReductionStateCreateFlagsNV = Flags<PipelineCoverageReductionStateCreateFlagBitsNV, VkPipelineCoverageReductionStateCreateFlagsNV>;
+ using PipelineCoverageReductionStateCreateFlagsNV = Flags<PipelineCoverageReductionStateCreateFlagBitsNV>;
VULKAN_HPP_INLINE std::string to_string( PipelineCoverageReductionStateCreateFlagsNV )
{
+
return "{}";
}
- enum class PipelineCoverageToColorStateCreateFlagBitsNV
+ enum class PipelineCoverageToColorStateCreateFlagBitsNV : VkFlags
{};
VULKAN_HPP_INLINE std::string to_string( PipelineCoverageToColorStateCreateFlagBitsNV )
@@ -10862,20 +12060,22 @@ namespace VULKAN_HPP_NAMESPACE
return "(void)";
}
- using PipelineCoverageToColorStateCreateFlagsNV = Flags<PipelineCoverageToColorStateCreateFlagBitsNV, VkPipelineCoverageToColorStateCreateFlagsNV>;
+ using PipelineCoverageToColorStateCreateFlagsNV = Flags<PipelineCoverageToColorStateCreateFlagBitsNV>;
VULKAN_HPP_INLINE std::string to_string( PipelineCoverageToColorStateCreateFlagsNV )
{
+
return "{}";
}
- using PipelineCreateFlags = Flags<PipelineCreateFlagBits, VkPipelineCreateFlags>;
+
+ using PipelineCreateFlags = Flags<PipelineCreateFlagBits>;
template <> struct FlagTraits<PipelineCreateFlagBits>
{
- enum
+ enum : VkFlags
{
- allFlags = VkFlags(PipelineCreateFlagBits::eDisableOptimization) | VkFlags(PipelineCreateFlagBits::eAllowDerivatives) | VkFlags(PipelineCreateFlagBits::eDerivative) | VkFlags(PipelineCreateFlagBits::eViewIndexFromDeviceIndex) | VkFlags(PipelineCreateFlagBits::eDispatchBase) | VkFlags(PipelineCreateFlagBits::eDeferCompileNV) | VkFlags(PipelineCreateFlagBits::eCaptureStatisticsKHR) | VkFlags(PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR)
+ allFlags = VkFlags(PipelineCreateFlagBits::eDisableOptimization) | VkFlags(PipelineCreateFlagBits::eAllowDerivatives) | VkFlags(PipelineCreateFlagBits::eDerivative) | VkFlags(PipelineCreateFlagBits::eViewIndexFromDeviceIndex) | VkFlags(PipelineCreateFlagBits::eDispatchBase) | VkFlags(PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR) | VkFlags(PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR) | VkFlags(PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR) | VkFlags(PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR) | VkFlags(PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR) | VkFlags(PipelineCreateFlagBits::eRayTracingSkipAabbsKHR) | VkFlags(PipelineCreateFlagBits::eDeferCompileNV) | VkFlags(PipelineCreateFlagBits::eCaptureStatisticsKHR) | VkFlags(PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR) | VkFlags(PipelineCreateFlagBits::eIndirectBindableNV) | VkFlags(PipelineCreateFlagBits::eLibraryKHR) | VkFlags(PipelineCreateFlagBits::eFailOnPipelineCompileRequiredEXT) | VkFlags(PipelineCreateFlagBits::eEarlyReturnOnFailureEXT)
};
};
@@ -10901,6 +12101,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlags value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -10909,17 +12110,28 @@ namespace VULKAN_HPP_NAMESPACE
if ( value & PipelineCreateFlagBits::eDerivative ) result += "Derivative | ";
if ( value & PipelineCreateFlagBits::eViewIndexFromDeviceIndex ) result += "ViewIndexFromDeviceIndex | ";
if ( value & PipelineCreateFlagBits::eDispatchBase ) result += "DispatchBase | ";
+ if ( value & PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR ) result += "RayTracingNoNullAnyHitShadersKHR | ";
+ if ( value & PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR ) result += "RayTracingNoNullClosestHitShadersKHR | ";
+ if ( value & PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR ) result += "RayTracingNoNullMissShadersKHR | ";
+ if ( value & PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR ) result += "RayTracingNoNullIntersectionShadersKHR | ";
+ if ( value & PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR ) result += "RayTracingSkipTrianglesKHR | ";
+ if ( value & PipelineCreateFlagBits::eRayTracingSkipAabbsKHR ) result += "RayTracingSkipAabbsKHR | ";
if ( value & PipelineCreateFlagBits::eDeferCompileNV ) result += "DeferCompileNV | ";
if ( value & PipelineCreateFlagBits::eCaptureStatisticsKHR ) result += "CaptureStatisticsKHR | ";
if ( value & PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR ) result += "CaptureInternalRepresentationsKHR | ";
+ if ( value & PipelineCreateFlagBits::eIndirectBindableNV ) result += "IndirectBindableNV | ";
+ if ( value & PipelineCreateFlagBits::eLibraryKHR ) result += "LibraryKHR | ";
+ if ( value & PipelineCreateFlagBits::eFailOnPipelineCompileRequiredEXT ) result += "FailOnPipelineCompileRequiredEXT | ";
+ if ( value & PipelineCreateFlagBits::eEarlyReturnOnFailureEXT ) result += "EarlyReturnOnFailureEXT | ";
return "{ " + result.substr(0, result.size() - 3) + " }";
}
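The same pattern applies to the ray-tracing related pipeline create flags added to allFlags above; a short fragment combining two of them (assuming vulkan.hpp is included and the corresponding extensions are available at runtime):

  vk::PipelineCreateFlags rtPipelineFlags =
      vk::PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR | vk::PipelineCreateFlagBits::eLibraryKHR;
  // vk::to_string( rtPipelineFlags ) yields "{ RayTracingSkipTrianglesKHR | LibraryKHR }"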
- using PipelineCreationFeedbackFlagsEXT = Flags<PipelineCreationFeedbackFlagBitsEXT, VkPipelineCreationFeedbackFlagsEXT>;
+
+ using PipelineCreationFeedbackFlagsEXT = Flags<PipelineCreationFeedbackFlagBitsEXT>;
template <> struct FlagTraits<PipelineCreationFeedbackFlagBitsEXT>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(PipelineCreationFeedbackFlagBitsEXT::eValid) | VkFlags(PipelineCreationFeedbackFlagBitsEXT::eApplicationPipelineCacheHit) | VkFlags(PipelineCreationFeedbackFlagBitsEXT::eBasePipelineAcceleration)
};
@@ -10947,6 +12159,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( PipelineCreationFeedbackFlagsEXT value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -10956,14 +12169,23 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using PipelineDepthStencilStateCreateFlags = Flags<PipelineDepthStencilStateCreateFlagBits, VkPipelineDepthStencilStateCreateFlags>;
+ enum class PipelineDepthStencilStateCreateFlagBits : VkFlags
+ {};
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineDepthStencilStateCreateFlagBits )
+ {
+ return "(void)";
+ }
+
+ using PipelineDepthStencilStateCreateFlags = Flags<PipelineDepthStencilStateCreateFlagBits>;
VULKAN_HPP_INLINE std::string to_string( PipelineDepthStencilStateCreateFlags )
{
+
return "{}";
}
- enum class PipelineDiscardRectangleStateCreateFlagBitsEXT
+ enum class PipelineDiscardRectangleStateCreateFlagBitsEXT : VkFlags
{};
VULKAN_HPP_INLINE std::string to_string( PipelineDiscardRectangleStateCreateFlagBitsEXT )
@@ -10971,42 +12193,79 @@ namespace VULKAN_HPP_NAMESPACE
return "(void)";
}
- using PipelineDiscardRectangleStateCreateFlagsEXT = Flags<PipelineDiscardRectangleStateCreateFlagBitsEXT, VkPipelineDiscardRectangleStateCreateFlagsEXT>;
+ using PipelineDiscardRectangleStateCreateFlagsEXT = Flags<PipelineDiscardRectangleStateCreateFlagBitsEXT>;
VULKAN_HPP_INLINE std::string to_string( PipelineDiscardRectangleStateCreateFlagsEXT )
{
+
return "{}";
}
- using PipelineDynamicStateCreateFlags = Flags<PipelineDynamicStateCreateFlagBits, VkPipelineDynamicStateCreateFlags>;
+ enum class PipelineDynamicStateCreateFlagBits : VkFlags
+ {};
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineDynamicStateCreateFlagBits )
+ {
+ return "(void)";
+ }
+
+ using PipelineDynamicStateCreateFlags = Flags<PipelineDynamicStateCreateFlagBits>;
VULKAN_HPP_INLINE std::string to_string( PipelineDynamicStateCreateFlags )
{
+
return "{}";
}
- using PipelineInputAssemblyStateCreateFlags = Flags<PipelineInputAssemblyStateCreateFlagBits, VkPipelineInputAssemblyStateCreateFlags>;
+ enum class PipelineInputAssemblyStateCreateFlagBits : VkFlags
+ {};
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineInputAssemblyStateCreateFlagBits )
+ {
+ return "(void)";
+ }
+
+ using PipelineInputAssemblyStateCreateFlags = Flags<PipelineInputAssemblyStateCreateFlagBits>;
VULKAN_HPP_INLINE std::string to_string( PipelineInputAssemblyStateCreateFlags )
{
+
return "{}";
}
- using PipelineLayoutCreateFlags = Flags<PipelineLayoutCreateFlagBits, VkPipelineLayoutCreateFlags>;
+ enum class PipelineLayoutCreateFlagBits : VkFlags
+ {};
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineLayoutCreateFlagBits )
+ {
+ return "(void)";
+ }
+
+ using PipelineLayoutCreateFlags = Flags<PipelineLayoutCreateFlagBits>;
VULKAN_HPP_INLINE std::string to_string( PipelineLayoutCreateFlags )
{
+
return "{}";
}
- using PipelineMultisampleStateCreateFlags = Flags<PipelineMultisampleStateCreateFlagBits, VkPipelineMultisampleStateCreateFlags>;
+ enum class PipelineMultisampleStateCreateFlagBits : VkFlags
+ {};
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineMultisampleStateCreateFlagBits )
+ {
+ return "(void)";
+ }
+
+ using PipelineMultisampleStateCreateFlags = Flags<PipelineMultisampleStateCreateFlagBits>;
VULKAN_HPP_INLINE std::string to_string( PipelineMultisampleStateCreateFlags )
{
+
return "{}";
}
- enum class PipelineRasterizationConservativeStateCreateFlagBitsEXT
+ enum class PipelineRasterizationConservativeStateCreateFlagBitsEXT : VkFlags
{};
VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationConservativeStateCreateFlagBitsEXT )
@@ -11014,14 +12273,15 @@ namespace VULKAN_HPP_NAMESPACE
return "(void)";
}
- using PipelineRasterizationConservativeStateCreateFlagsEXT = Flags<PipelineRasterizationConservativeStateCreateFlagBitsEXT, VkPipelineRasterizationConservativeStateCreateFlagsEXT>;
+ using PipelineRasterizationConservativeStateCreateFlagsEXT = Flags<PipelineRasterizationConservativeStateCreateFlagBitsEXT>;
VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationConservativeStateCreateFlagsEXT )
{
+
return "{}";
}
- enum class PipelineRasterizationDepthClipStateCreateFlagBitsEXT
+ enum class PipelineRasterizationDepthClipStateCreateFlagBitsEXT : VkFlags
{};
VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationDepthClipStateCreateFlagBitsEXT )
@@ -11029,21 +12289,31 @@ namespace VULKAN_HPP_NAMESPACE
return "(void)";
}
- using PipelineRasterizationDepthClipStateCreateFlagsEXT = Flags<PipelineRasterizationDepthClipStateCreateFlagBitsEXT, VkPipelineRasterizationDepthClipStateCreateFlagsEXT>;
+ using PipelineRasterizationDepthClipStateCreateFlagsEXT = Flags<PipelineRasterizationDepthClipStateCreateFlagBitsEXT>;
VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationDepthClipStateCreateFlagsEXT )
{
+
return "{}";
}
- using PipelineRasterizationStateCreateFlags = Flags<PipelineRasterizationStateCreateFlagBits, VkPipelineRasterizationStateCreateFlags>;
+ enum class PipelineRasterizationStateCreateFlagBits : VkFlags
+ {};
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateCreateFlagBits )
+ {
+ return "(void)";
+ }
+
+ using PipelineRasterizationStateCreateFlags = Flags<PipelineRasterizationStateCreateFlagBits>;
VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateCreateFlags )
{
+
return "{}";
}
- enum class PipelineRasterizationStateStreamCreateFlagBitsEXT
+ enum class PipelineRasterizationStateStreamCreateFlagBitsEXT : VkFlags
{};
VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateStreamCreateFlagBitsEXT )
@@ -11051,18 +12321,20 @@ namespace VULKAN_HPP_NAMESPACE
return "(void)";
}
- using PipelineRasterizationStateStreamCreateFlagsEXT = Flags<PipelineRasterizationStateStreamCreateFlagBitsEXT, VkPipelineRasterizationStateStreamCreateFlagsEXT>;
+ using PipelineRasterizationStateStreamCreateFlagsEXT = Flags<PipelineRasterizationStateStreamCreateFlagBitsEXT>;
VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateStreamCreateFlagsEXT )
{
+
return "{}";
}
- using PipelineShaderStageCreateFlags = Flags<PipelineShaderStageCreateFlagBits, VkPipelineShaderStageCreateFlags>;
+
+ using PipelineShaderStageCreateFlags = Flags<PipelineShaderStageCreateFlagBits>;
template <> struct FlagTraits<PipelineShaderStageCreateFlagBits>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSizeEXT) | VkFlags(PipelineShaderStageCreateFlagBits::eRequireFullSubgroupsEXT)
};
@@ -11090,6 +12362,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( PipelineShaderStageCreateFlags value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -11098,13 +12371,14 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using PipelineStageFlags = Flags<PipelineStageFlagBits, VkPipelineStageFlags>;
+
+ using PipelineStageFlags = Flags<PipelineStageFlagBits>;
template <> struct FlagTraits<PipelineStageFlagBits>
{
- enum
+ enum : VkFlags
{
- allFlags = VkFlags(PipelineStageFlagBits::eTopOfPipe) | VkFlags(PipelineStageFlagBits::eDrawIndirect) | VkFlags(PipelineStageFlagBits::eVertexInput) | VkFlags(PipelineStageFlagBits::eVertexShader) | VkFlags(PipelineStageFlagBits::eTessellationControlShader) | VkFlags(PipelineStageFlagBits::eTessellationEvaluationShader) | VkFlags(PipelineStageFlagBits::eGeometryShader) | VkFlags(PipelineStageFlagBits::eFragmentShader) | VkFlags(PipelineStageFlagBits::eEarlyFragmentTests) | VkFlags(PipelineStageFlagBits::eLateFragmentTests) | VkFlags(PipelineStageFlagBits::eColorAttachmentOutput) | VkFlags(PipelineStageFlagBits::eComputeShader) | VkFlags(PipelineStageFlagBits::eTransfer) | VkFlags(PipelineStageFlagBits::eBottomOfPipe) | VkFlags(PipelineStageFlagBits::eHost) | VkFlags(PipelineStageFlagBits::eAllGraphics) | VkFlags(PipelineStageFlagBits::eAllCommands) | VkFlags(PipelineStageFlagBits::eTransformFeedbackEXT) | VkFlags(PipelineStageFlagBits::eConditionalRenderingEXT) | VkFlags(PipelineStageFlagBits::eCommandProcessNVX) | VkFlags(PipelineStageFlagBits::eShadingRateImageNV) | VkFlags(PipelineStageFlagBits::eRayTracingShaderNV) | VkFlags(PipelineStageFlagBits::eAccelerationStructureBuildNV) | VkFlags(PipelineStageFlagBits::eTaskShaderNV) | VkFlags(PipelineStageFlagBits::eMeshShaderNV) | VkFlags(PipelineStageFlagBits::eFragmentDensityProcessEXT)
+ allFlags = VkFlags(PipelineStageFlagBits::eTopOfPipe) | VkFlags(PipelineStageFlagBits::eDrawIndirect) | VkFlags(PipelineStageFlagBits::eVertexInput) | VkFlags(PipelineStageFlagBits::eVertexShader) | VkFlags(PipelineStageFlagBits::eTessellationControlShader) | VkFlags(PipelineStageFlagBits::eTessellationEvaluationShader) | VkFlags(PipelineStageFlagBits::eGeometryShader) | VkFlags(PipelineStageFlagBits::eFragmentShader) | VkFlags(PipelineStageFlagBits::eEarlyFragmentTests) | VkFlags(PipelineStageFlagBits::eLateFragmentTests) | VkFlags(PipelineStageFlagBits::eColorAttachmentOutput) | VkFlags(PipelineStageFlagBits::eComputeShader) | VkFlags(PipelineStageFlagBits::eTransfer) | VkFlags(PipelineStageFlagBits::eBottomOfPipe) | VkFlags(PipelineStageFlagBits::eHost) | VkFlags(PipelineStageFlagBits::eAllGraphics) | VkFlags(PipelineStageFlagBits::eAllCommands) | VkFlags(PipelineStageFlagBits::eTransformFeedbackEXT) | VkFlags(PipelineStageFlagBits::eConditionalRenderingEXT) | VkFlags(PipelineStageFlagBits::eRayTracingShaderKHR) | VkFlags(PipelineStageFlagBits::eAccelerationStructureBuildKHR) | VkFlags(PipelineStageFlagBits::eShadingRateImageNV) | VkFlags(PipelineStageFlagBits::eTaskShaderNV) | VkFlags(PipelineStageFlagBits::eMeshShaderNV) | VkFlags(PipelineStageFlagBits::eFragmentDensityProcessEXT) | VkFlags(PipelineStageFlagBits::eCommandPreprocessNV)
};
};
@@ -11130,6 +12404,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( PipelineStageFlags value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -11152,38 +12427,65 @@ namespace VULKAN_HPP_NAMESPACE
if ( value & PipelineStageFlagBits::eAllCommands ) result += "AllCommands | ";
if ( value & PipelineStageFlagBits::eTransformFeedbackEXT ) result += "TransformFeedbackEXT | ";
if ( value & PipelineStageFlagBits::eConditionalRenderingEXT ) result += "ConditionalRenderingEXT | ";
- if ( value & PipelineStageFlagBits::eCommandProcessNVX ) result += "CommandProcessNVX | ";
+ if ( value & PipelineStageFlagBits::eRayTracingShaderKHR ) result += "RayTracingShaderKHR | ";
+ if ( value & PipelineStageFlagBits::eAccelerationStructureBuildKHR ) result += "AccelerationStructureBuildKHR | ";
if ( value & PipelineStageFlagBits::eShadingRateImageNV ) result += "ShadingRateImageNV | ";
- if ( value & PipelineStageFlagBits::eRayTracingShaderNV ) result += "RayTracingShaderNV | ";
- if ( value & PipelineStageFlagBits::eAccelerationStructureBuildNV ) result += "AccelerationStructureBuildNV | ";
if ( value & PipelineStageFlagBits::eTaskShaderNV ) result += "TaskShaderNV | ";
if ( value & PipelineStageFlagBits::eMeshShaderNV ) result += "MeshShaderNV | ";
if ( value & PipelineStageFlagBits::eFragmentDensityProcessEXT ) result += "FragmentDensityProcessEXT | ";
+ if ( value & PipelineStageFlagBits::eCommandPreprocessNV ) result += "CommandPreprocessNV | ";
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using PipelineTessellationStateCreateFlags = Flags<PipelineTessellationStateCreateFlagBits, VkPipelineTessellationStateCreateFlags>;
+ enum class PipelineTessellationStateCreateFlagBits : VkFlags
+ {};
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineTessellationStateCreateFlagBits )
+ {
+ return "(void)";
+ }
+
+ using PipelineTessellationStateCreateFlags = Flags<PipelineTessellationStateCreateFlagBits>;
VULKAN_HPP_INLINE std::string to_string( PipelineTessellationStateCreateFlags )
{
+
return "{}";
}
- using PipelineVertexInputStateCreateFlags = Flags<PipelineVertexInputStateCreateFlagBits, VkPipelineVertexInputStateCreateFlags>;
+ enum class PipelineVertexInputStateCreateFlagBits : VkFlags
+ {};
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineVertexInputStateCreateFlagBits )
+ {
+ return "(void)";
+ }
+
+ using PipelineVertexInputStateCreateFlags = Flags<PipelineVertexInputStateCreateFlagBits>;
VULKAN_HPP_INLINE std::string to_string( PipelineVertexInputStateCreateFlags )
{
+
return "{}";
}
- using PipelineViewportStateCreateFlags = Flags<PipelineViewportStateCreateFlagBits, VkPipelineViewportStateCreateFlags>;
+ enum class PipelineViewportStateCreateFlagBits : VkFlags
+ {};
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineViewportStateCreateFlagBits )
+ {
+ return "(void)";
+ }
+
+ using PipelineViewportStateCreateFlags = Flags<PipelineViewportStateCreateFlagBits>;
VULKAN_HPP_INLINE std::string to_string( PipelineViewportStateCreateFlags )
{
+
return "{}";
}
- enum class PipelineViewportSwizzleStateCreateFlagBitsNV
+ enum class PipelineViewportSwizzleStateCreateFlagBitsNV : VkFlags
{};
VULKAN_HPP_INLINE std::string to_string( PipelineViewportSwizzleStateCreateFlagBitsNV )
@@ -11191,18 +12493,29 @@ namespace VULKAN_HPP_NAMESPACE
return "(void)";
}
- using PipelineViewportSwizzleStateCreateFlagsNV = Flags<PipelineViewportSwizzleStateCreateFlagBitsNV, VkPipelineViewportSwizzleStateCreateFlagsNV>;
+ using PipelineViewportSwizzleStateCreateFlagsNV = Flags<PipelineViewportSwizzleStateCreateFlagBitsNV>;
VULKAN_HPP_INLINE std::string to_string( PipelineViewportSwizzleStateCreateFlagsNV )
{
+
return "{}";
}
- using QueryControlFlags = Flags<QueryControlFlagBits, VkQueryControlFlags>;
+
+ using PrivateDataSlotCreateFlagsEXT = Flags<PrivateDataSlotCreateFlagBitsEXT>;
+
+ VULKAN_HPP_INLINE std::string to_string( PrivateDataSlotCreateFlagsEXT )
+ {
+
+ return "{}";
+ }
+
+
+ using QueryControlFlags = Flags<QueryControlFlagBits>;
template <> struct FlagTraits<QueryControlFlagBits>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(QueryControlFlagBits::ePrecise)
};
@@ -11230,6 +12543,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( QueryControlFlags value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -11237,11 +12551,12 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using QueryPipelineStatisticFlags = Flags<QueryPipelineStatisticFlagBits, VkQueryPipelineStatisticFlags>;
+
+ using QueryPipelineStatisticFlags = Flags<QueryPipelineStatisticFlagBits>;
template <> struct FlagTraits<QueryPipelineStatisticFlagBits>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(QueryPipelineStatisticFlagBits::eInputAssemblyVertices) | VkFlags(QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eVertexShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eGeometryShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eClippingInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eClippingPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eFragmentShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches) | VkFlags(QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eComputeShaderInvocations)
};
@@ -11269,6 +12584,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( QueryPipelineStatisticFlags value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -11286,18 +12602,21 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using QueryPoolCreateFlags = Flags<QueryPoolCreateFlagBits, VkQueryPoolCreateFlags>;
+
+ using QueryPoolCreateFlags = Flags<QueryPoolCreateFlagBits>;
VULKAN_HPP_INLINE std::string to_string( QueryPoolCreateFlags )
{
+
return "{}";
}
- using QueryResultFlags = Flags<QueryResultFlagBits, VkQueryResultFlags>;
+
+ using QueryResultFlags = Flags<QueryResultFlagBits>;
template <> struct FlagTraits<QueryResultFlagBits>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(QueryResultFlagBits::e64) | VkFlags(QueryResultFlagBits::eWait) | VkFlags(QueryResultFlagBits::eWithAvailability) | VkFlags(QueryResultFlagBits::ePartial)
};
@@ -11325,6 +12644,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( QueryResultFlags value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -11335,11 +12655,12 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using QueueFlags = Flags<QueueFlagBits, VkQueueFlags>;
+
+ using QueueFlags = Flags<QueueFlagBits>;
template <> struct FlagTraits<QueueFlagBits>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(QueueFlagBits::eGraphics) | VkFlags(QueueFlagBits::eCompute) | VkFlags(QueueFlagBits::eTransfer) | VkFlags(QueueFlagBits::eSparseBinding) | VkFlags(QueueFlagBits::eProtected)
};
@@ -11367,6 +12688,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( QueueFlags value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -11378,18 +12700,53 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using RenderPassCreateFlags = Flags<RenderPassCreateFlagBits, VkRenderPassCreateFlags>;
- VULKAN_HPP_INLINE std::string to_string( RenderPassCreateFlags )
+ using RenderPassCreateFlags = Flags<RenderPassCreateFlagBits>;
+
+ template <> struct FlagTraits<RenderPassCreateFlagBits>
{
- return "{}";
+ enum : VkFlags
+ {
+ allFlags = VkFlags(RenderPassCreateFlagBits::eTransformQCOM)
+ };
+ };
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderPassCreateFlags operator|( RenderPassCreateFlagBits bit0, RenderPassCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+ {
+ return RenderPassCreateFlags( bit0 ) | bit1;
}
- using ResolveModeFlags = Flags<ResolveModeFlagBits, VkResolveModeFlags>;
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderPassCreateFlags operator&( RenderPassCreateFlagBits bit0, RenderPassCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+ {
+ return RenderPassCreateFlags( bit0 ) & bit1;
+ }
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderPassCreateFlags operator^( RenderPassCreateFlagBits bit0, RenderPassCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+ {
+ return RenderPassCreateFlags( bit0 ) ^ bit1;
+ }
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderPassCreateFlags operator~( RenderPassCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
+ {
+ return ~( RenderPassCreateFlags( bits ) );
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( RenderPassCreateFlags value )
+ {
+
+ if ( !value ) return "{}";
+ std::string result;
+
+ if ( value & RenderPassCreateFlagBits::eTransformQCOM ) result += "TransformQCOM | ";
+ return "{ " + result.substr(0, result.size() - 3) + " }";
+ }
+
+
+ using ResolveModeFlags = Flags<ResolveModeFlagBits>;
template <> struct FlagTraits<ResolveModeFlagBits>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(ResolveModeFlagBits::eNone) | VkFlags(ResolveModeFlagBits::eSampleZero) | VkFlags(ResolveModeFlagBits::eAverage) | VkFlags(ResolveModeFlagBits::eMin) | VkFlags(ResolveModeFlagBits::eMax)
};
@@ -11419,6 +12776,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( ResolveModeFlags value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -11429,11 +12787,12 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using SampleCountFlags = Flags<SampleCountFlagBits, VkSampleCountFlags>;
+
+ using SampleCountFlags = Flags<SampleCountFlagBits>;
template <> struct FlagTraits<SampleCountFlagBits>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(SampleCountFlagBits::e1) | VkFlags(SampleCountFlagBits::e2) | VkFlags(SampleCountFlagBits::e4) | VkFlags(SampleCountFlagBits::e8) | VkFlags(SampleCountFlagBits::e16) | VkFlags(SampleCountFlagBits::e32) | VkFlags(SampleCountFlagBits::e64)
};
@@ -11461,6 +12820,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( SampleCountFlags value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -11474,11 +12834,12 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using SamplerCreateFlags = Flags<SamplerCreateFlagBits, VkSamplerCreateFlags>;
+
+ using SamplerCreateFlags = Flags<SamplerCreateFlagBits>;
template <> struct FlagTraits<SamplerCreateFlagBits>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(SamplerCreateFlagBits::eSubsampledEXT) | VkFlags(SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT)
};
@@ -11506,6 +12867,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( SamplerCreateFlags value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -11514,18 +12876,21 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using SemaphoreCreateFlags = Flags<SemaphoreCreateFlagBits, VkSemaphoreCreateFlags>;
+
+ using SemaphoreCreateFlags = Flags<SemaphoreCreateFlagBits>;
VULKAN_HPP_INLINE std::string to_string( SemaphoreCreateFlags )
{
+
return "{}";
}
- using SemaphoreImportFlags = Flags<SemaphoreImportFlagBits, VkSemaphoreImportFlags>;
+
+ using SemaphoreImportFlags = Flags<SemaphoreImportFlagBits>;
template <> struct FlagTraits<SemaphoreImportFlagBits>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(SemaphoreImportFlagBits::eTemporary)
};
@@ -11555,6 +12920,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( SemaphoreImportFlags value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -11562,11 +12928,12 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using SemaphoreWaitFlags = Flags<SemaphoreWaitFlagBits, VkSemaphoreWaitFlags>;
+
+ using SemaphoreWaitFlags = Flags<SemaphoreWaitFlagBits>;
template <> struct FlagTraits<SemaphoreWaitFlagBits>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(SemaphoreWaitFlagBits::eAny)
};
@@ -11596,6 +12963,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( SemaphoreWaitFlags value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -11603,27 +12971,32 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using ShaderCorePropertiesFlagsAMD = Flags<ShaderCorePropertiesFlagBitsAMD, VkShaderCorePropertiesFlagsAMD>;
+
+ using ShaderCorePropertiesFlagsAMD = Flags<ShaderCorePropertiesFlagBitsAMD>;
VULKAN_HPP_INLINE std::string to_string( ShaderCorePropertiesFlagsAMD )
{
+
return "{}";
}
- using ShaderModuleCreateFlags = Flags<ShaderModuleCreateFlagBits, VkShaderModuleCreateFlags>;
+
+ using ShaderModuleCreateFlags = Flags<ShaderModuleCreateFlagBits>;
VULKAN_HPP_INLINE std::string to_string( ShaderModuleCreateFlags )
{
+
return "{}";
}
- using ShaderStageFlags = Flags<ShaderStageFlagBits, VkShaderStageFlags>;
+
+ using ShaderStageFlags = Flags<ShaderStageFlagBits>;
template <> struct FlagTraits<ShaderStageFlagBits>
{
- enum
+ enum : VkFlags
{
- allFlags = VkFlags(ShaderStageFlagBits::eVertex) | VkFlags(ShaderStageFlagBits::eTessellationControl) | VkFlags(ShaderStageFlagBits::eTessellationEvaluation) | VkFlags(ShaderStageFlagBits::eGeometry) | VkFlags(ShaderStageFlagBits::eFragment) | VkFlags(ShaderStageFlagBits::eCompute) | VkFlags(ShaderStageFlagBits::eAllGraphics) | VkFlags(ShaderStageFlagBits::eAll) | VkFlags(ShaderStageFlagBits::eRaygenNV) | VkFlags(ShaderStageFlagBits::eAnyHitNV) | VkFlags(ShaderStageFlagBits::eClosestHitNV) | VkFlags(ShaderStageFlagBits::eMissNV) | VkFlags(ShaderStageFlagBits::eIntersectionNV) | VkFlags(ShaderStageFlagBits::eCallableNV) | VkFlags(ShaderStageFlagBits::eTaskNV) | VkFlags(ShaderStageFlagBits::eMeshNV)
+ allFlags = VkFlags(ShaderStageFlagBits::eVertex) | VkFlags(ShaderStageFlagBits::eTessellationControl) | VkFlags(ShaderStageFlagBits::eTessellationEvaluation) | VkFlags(ShaderStageFlagBits::eGeometry) | VkFlags(ShaderStageFlagBits::eFragment) | VkFlags(ShaderStageFlagBits::eCompute) | VkFlags(ShaderStageFlagBits::eAllGraphics) | VkFlags(ShaderStageFlagBits::eAll) | VkFlags(ShaderStageFlagBits::eRaygenKHR) | VkFlags(ShaderStageFlagBits::eAnyHitKHR) | VkFlags(ShaderStageFlagBits::eClosestHitKHR) | VkFlags(ShaderStageFlagBits::eMissKHR) | VkFlags(ShaderStageFlagBits::eIntersectionKHR) | VkFlags(ShaderStageFlagBits::eCallableKHR) | VkFlags(ShaderStageFlagBits::eTaskNV) | VkFlags(ShaderStageFlagBits::eMeshNV)
};
};
@@ -11649,6 +13022,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( ShaderStageFlags value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -11658,22 +13032,23 @@ namespace VULKAN_HPP_NAMESPACE
if ( value & ShaderStageFlagBits::eGeometry ) result += "Geometry | ";
if ( value & ShaderStageFlagBits::eFragment ) result += "Fragment | ";
if ( value & ShaderStageFlagBits::eCompute ) result += "Compute | ";
- if ( value & ShaderStageFlagBits::eRaygenNV ) result += "RaygenNV | ";
- if ( value & ShaderStageFlagBits::eAnyHitNV ) result += "AnyHitNV | ";
- if ( value & ShaderStageFlagBits::eClosestHitNV ) result += "ClosestHitNV | ";
- if ( value & ShaderStageFlagBits::eMissNV ) result += "MissNV | ";
- if ( value & ShaderStageFlagBits::eIntersectionNV ) result += "IntersectionNV | ";
- if ( value & ShaderStageFlagBits::eCallableNV ) result += "CallableNV | ";
+ if ( value & ShaderStageFlagBits::eRaygenKHR ) result += "RaygenKHR | ";
+ if ( value & ShaderStageFlagBits::eAnyHitKHR ) result += "AnyHitKHR | ";
+ if ( value & ShaderStageFlagBits::eClosestHitKHR ) result += "ClosestHitKHR | ";
+ if ( value & ShaderStageFlagBits::eMissKHR ) result += "MissKHR | ";
+ if ( value & ShaderStageFlagBits::eIntersectionKHR ) result += "IntersectionKHR | ";
+ if ( value & ShaderStageFlagBits::eCallableKHR ) result += "CallableKHR | ";
if ( value & ShaderStageFlagBits::eTaskNV ) result += "TaskNV | ";
if ( value & ShaderStageFlagBits::eMeshNV ) result += "MeshNV | ";
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using SparseImageFormatFlags = Flags<SparseImageFormatFlagBits, VkSparseImageFormatFlags>;
+
+ using SparseImageFormatFlags = Flags<SparseImageFormatFlagBits>;
template <> struct FlagTraits<SparseImageFormatFlagBits>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(SparseImageFormatFlagBits::eSingleMiptail) | VkFlags(SparseImageFormatFlagBits::eAlignedMipSize) | VkFlags(SparseImageFormatFlagBits::eNonstandardBlockSize)
};
@@ -11701,6 +13076,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( SparseImageFormatFlags value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -11710,11 +13086,12 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using SparseMemoryBindFlags = Flags<SparseMemoryBindFlagBits, VkSparseMemoryBindFlags>;
+
+ using SparseMemoryBindFlags = Flags<SparseMemoryBindFlagBits>;
template <> struct FlagTraits<SparseMemoryBindFlagBits>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(SparseMemoryBindFlagBits::eMetadata)
};
@@ -11742,6 +13119,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( SparseMemoryBindFlags value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -11749,11 +13127,12 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using StencilFaceFlags = Flags<StencilFaceFlagBits, VkStencilFaceFlags>;
+
+ using StencilFaceFlags = Flags<StencilFaceFlagBits>;
template <> struct FlagTraits<StencilFaceFlagBits>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(StencilFaceFlagBits::eFront) | VkFlags(StencilFaceFlagBits::eBack) | VkFlags(StencilFaceFlagBits::eFrontAndBack)
};
@@ -11781,6 +13160,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( StencilFaceFlags value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -11790,7 +13170,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifdef VK_USE_PLATFORM_GGP
- enum class StreamDescriptorSurfaceCreateFlagBitsGGP
+ enum class StreamDescriptorSurfaceCreateFlagBitsGGP : VkFlags
{};
VULKAN_HPP_INLINE std::string to_string( StreamDescriptorSurfaceCreateFlagBitsGGP )
@@ -11798,19 +13178,21 @@ namespace VULKAN_HPP_NAMESPACE
return "(void)";
}
- using StreamDescriptorSurfaceCreateFlagsGGP = Flags<StreamDescriptorSurfaceCreateFlagBitsGGP, VkStreamDescriptorSurfaceCreateFlagsGGP>;
+ using StreamDescriptorSurfaceCreateFlagsGGP = Flags<StreamDescriptorSurfaceCreateFlagBitsGGP>;
VULKAN_HPP_INLINE std::string to_string( StreamDescriptorSurfaceCreateFlagsGGP )
{
+
return "{}";
}
#endif /*VK_USE_PLATFORM_GGP*/
- using SubgroupFeatureFlags = Flags<SubgroupFeatureFlagBits, VkSubgroupFeatureFlags>;
+
+ using SubgroupFeatureFlags = Flags<SubgroupFeatureFlagBits>;
template <> struct FlagTraits<SubgroupFeatureFlagBits>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(SubgroupFeatureFlagBits::eBasic) | VkFlags(SubgroupFeatureFlagBits::eVote) | VkFlags(SubgroupFeatureFlagBits::eArithmetic) | VkFlags(SubgroupFeatureFlagBits::eBallot) | VkFlags(SubgroupFeatureFlagBits::eShuffle) | VkFlags(SubgroupFeatureFlagBits::eShuffleRelative) | VkFlags(SubgroupFeatureFlagBits::eClustered) | VkFlags(SubgroupFeatureFlagBits::eQuad) | VkFlags(SubgroupFeatureFlagBits::ePartitionedNV)
};
@@ -11838,6 +13220,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( SubgroupFeatureFlags value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -11853,13 +13236,14 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using SubpassDescriptionFlags = Flags<SubpassDescriptionFlagBits, VkSubpassDescriptionFlags>;
+
+ using SubpassDescriptionFlags = Flags<SubpassDescriptionFlagBits>;
template <> struct FlagTraits<SubpassDescriptionFlagBits>
{
- enum
+ enum : VkFlags
{
- allFlags = VkFlags(SubpassDescriptionFlagBits::ePerViewAttributesNVX) | VkFlags(SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX)
+ allFlags = VkFlags(SubpassDescriptionFlagBits::ePerViewAttributesNVX) | VkFlags(SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX) | VkFlags(SubpassDescriptionFlagBits::eFragmentRegionQCOM) | VkFlags(SubpassDescriptionFlagBits::eShaderResolveQCOM)
};
};
@@ -11885,19 +13269,23 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( SubpassDescriptionFlags value )
{
+
if ( !value ) return "{}";
std::string result;
if ( value & SubpassDescriptionFlagBits::ePerViewAttributesNVX ) result += "PerViewAttributesNVX | ";
if ( value & SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX ) result += "PerViewPositionXOnlyNVX | ";
+ if ( value & SubpassDescriptionFlagBits::eFragmentRegionQCOM ) result += "FragmentRegionQCOM | ";
+ if ( value & SubpassDescriptionFlagBits::eShaderResolveQCOM ) result += "ShaderResolveQCOM | ";
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using SurfaceCounterFlagsEXT = Flags<SurfaceCounterFlagBitsEXT, VkSurfaceCounterFlagsEXT>;
+
+ using SurfaceCounterFlagsEXT = Flags<SurfaceCounterFlagBitsEXT>;
template <> struct FlagTraits<SurfaceCounterFlagBitsEXT>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(SurfaceCounterFlagBitsEXT::eVblank)
};
@@ -11925,6 +13313,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( SurfaceCounterFlagsEXT value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -11932,11 +13321,12 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using SurfaceTransformFlagsKHR = Flags<SurfaceTransformFlagBitsKHR, VkSurfaceTransformFlagsKHR>;
+
+ using SurfaceTransformFlagsKHR = Flags<SurfaceTransformFlagBitsKHR>;
template <> struct FlagTraits<SurfaceTransformFlagBitsKHR>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(SurfaceTransformFlagBitsKHR::eIdentity) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate90) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate180) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate270) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirror) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270) | VkFlags(SurfaceTransformFlagBitsKHR::eInherit)
};
@@ -11964,6 +13354,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( SurfaceTransformFlagsKHR value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -11979,11 +13370,12 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using SwapchainCreateFlagsKHR = Flags<SwapchainCreateFlagBitsKHR, VkSwapchainCreateFlagsKHR>;
+
+ using SwapchainCreateFlagsKHR = Flags<SwapchainCreateFlagBitsKHR>;
template <> struct FlagTraits<SwapchainCreateFlagBitsKHR>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions) | VkFlags(SwapchainCreateFlagBitsKHR::eProtected) | VkFlags(SwapchainCreateFlagBitsKHR::eMutableFormat)
};
@@ -12011,6 +13403,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( SwapchainCreateFlagsKHR value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -12020,11 +13413,12 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- using ToolPurposeFlagsEXT = Flags<ToolPurposeFlagBitsEXT, VkToolPurposeFlagsEXT>;
+
+ using ToolPurposeFlagsEXT = Flags<ToolPurposeFlagBitsEXT>;
template <> struct FlagTraits<ToolPurposeFlagBitsEXT>
{
- enum
+ enum : VkFlags
{
allFlags = VkFlags(ToolPurposeFlagBitsEXT::eValidation) | VkFlags(ToolPurposeFlagBitsEXT::eProfiling) | VkFlags(ToolPurposeFlagBitsEXT::eTracing) | VkFlags(ToolPurposeFlagBitsEXT::eAdditionalFeatures) | VkFlags(ToolPurposeFlagBitsEXT::eModifyingFeatures) | VkFlags(ToolPurposeFlagBitsEXT::eDebugReporting) | VkFlags(ToolPurposeFlagBitsEXT::eDebugMarkers)
};
@@ -12052,6 +13446,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE std::string to_string( ToolPurposeFlagsEXT value )
{
+
if ( !value ) return "{}";
std::string result;
@@ -12065,7 +13460,7 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- enum class ValidationCacheCreateFlagBitsEXT
+ enum class ValidationCacheCreateFlagBitsEXT : VkFlags
{};
VULKAN_HPP_INLINE std::string to_string( ValidationCacheCreateFlagBitsEXT )
@@ -12073,15 +13468,16 @@ namespace VULKAN_HPP_NAMESPACE
return "(void)";
}
- using ValidationCacheCreateFlagsEXT = Flags<ValidationCacheCreateFlagBitsEXT, VkValidationCacheCreateFlagsEXT>;
+ using ValidationCacheCreateFlagsEXT = Flags<ValidationCacheCreateFlagBitsEXT>;
VULKAN_HPP_INLINE std::string to_string( ValidationCacheCreateFlagsEXT )
{
+
return "{}";
}
#ifdef VK_USE_PLATFORM_VI_NN
- enum class ViSurfaceCreateFlagBitsNN
+ enum class ViSurfaceCreateFlagBitsNN : VkFlags
{};
VULKAN_HPP_INLINE std::string to_string( ViSurfaceCreateFlagBitsNN )
@@ -12089,16 +13485,17 @@ namespace VULKAN_HPP_NAMESPACE
return "(void)";
}
- using ViSurfaceCreateFlagsNN = Flags<ViSurfaceCreateFlagBitsNN, VkViSurfaceCreateFlagsNN>;
+ using ViSurfaceCreateFlagsNN = Flags<ViSurfaceCreateFlagBitsNN>;
VULKAN_HPP_INLINE std::string to_string( ViSurfaceCreateFlagsNN )
{
+
return "{}";
}
#endif /*VK_USE_PLATFORM_VI_NN*/
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
- enum class WaylandSurfaceCreateFlagBitsKHR
+ enum class WaylandSurfaceCreateFlagBitsKHR : VkFlags
{};
VULKAN_HPP_INLINE std::string to_string( WaylandSurfaceCreateFlagBitsKHR )
@@ -12106,16 +13503,17 @@ namespace VULKAN_HPP_NAMESPACE
return "(void)";
}
- using WaylandSurfaceCreateFlagsKHR = Flags<WaylandSurfaceCreateFlagBitsKHR, VkWaylandSurfaceCreateFlagsKHR>;
+ using WaylandSurfaceCreateFlagsKHR = Flags<WaylandSurfaceCreateFlagBitsKHR>;
VULKAN_HPP_INLINE std::string to_string( WaylandSurfaceCreateFlagsKHR )
{
+
return "{}";
}
#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
- enum class Win32SurfaceCreateFlagBitsKHR
+ enum class Win32SurfaceCreateFlagBitsKHR : VkFlags
{};
VULKAN_HPP_INLINE std::string to_string( Win32SurfaceCreateFlagBitsKHR )
@@ -12123,16 +13521,17 @@ namespace VULKAN_HPP_NAMESPACE
return "(void)";
}
- using Win32SurfaceCreateFlagsKHR = Flags<Win32SurfaceCreateFlagBitsKHR, VkWin32SurfaceCreateFlagsKHR>;
+ using Win32SurfaceCreateFlagsKHR = Flags<Win32SurfaceCreateFlagBitsKHR>;
VULKAN_HPP_INLINE std::string to_string( Win32SurfaceCreateFlagsKHR )
{
+
return "{}";
}
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#ifdef VK_USE_PLATFORM_XCB_KHR
- enum class XcbSurfaceCreateFlagBitsKHR
+ enum class XcbSurfaceCreateFlagBitsKHR : VkFlags
{};
VULKAN_HPP_INLINE std::string to_string( XcbSurfaceCreateFlagBitsKHR )
@@ -12140,16 +13539,17 @@ namespace VULKAN_HPP_NAMESPACE
return "(void)";
}
- using XcbSurfaceCreateFlagsKHR = Flags<XcbSurfaceCreateFlagBitsKHR, VkXcbSurfaceCreateFlagsKHR>;
+ using XcbSurfaceCreateFlagsKHR = Flags<XcbSurfaceCreateFlagBitsKHR>;
VULKAN_HPP_INLINE std::string to_string( XcbSurfaceCreateFlagsKHR )
{
+
return "{}";
}
#endif /*VK_USE_PLATFORM_XCB_KHR*/
#ifdef VK_USE_PLATFORM_XLIB_KHR
- enum class XlibSurfaceCreateFlagBitsKHR
+ enum class XlibSurfaceCreateFlagBitsKHR : VkFlags
{};
VULKAN_HPP_INLINE std::string to_string( XlibSurfaceCreateFlagBitsKHR )
@@ -12157,10 +13557,11 @@ namespace VULKAN_HPP_NAMESPACE
return "(void)";
}
- using XlibSurfaceCreateFlagsKHR = Flags<XlibSurfaceCreateFlagBitsKHR, VkXlibSurfaceCreateFlagsKHR>;
+ using XlibSurfaceCreateFlagsKHR = Flags<XlibSurfaceCreateFlagBitsKHR>;
VULKAN_HPP_INLINE std::string to_string( XlibSurfaceCreateFlagsKHR )
{
+
return "{}";
}
#endif /*VK_USE_PLATFORM_XLIB_KHR*/
@@ -12448,6 +13849,15 @@ namespace VULKAN_HPP_NAMESPACE
: SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message ) {}
};
+ class IncompatibleVersionKHRError : public SystemError
+ {
+ public:
+ IncompatibleVersionKHRError( std::string const& message )
+ : SystemError( make_error_code( Result::eErrorIncompatibleVersionKHR ), message ) {}
+ IncompatibleVersionKHRError( char const * message )
+ : SystemError( make_error_code( Result::eErrorIncompatibleVersionKHR ), message ) {}
+ };
+
class InvalidDrmFormatModifierPlaneLayoutEXTError : public SystemError
{
public:
@@ -12475,6 +13885,7 @@ namespace VULKAN_HPP_NAMESPACE
: SystemError( make_error_code( Result::eErrorFullScreenExclusiveModeLostEXT ), message ) {}
};
+
[[noreturn]] static void throwResultException( Result result, char const * message )
{
switch ( result )
@@ -12502,6 +13913,7 @@ namespace VULKAN_HPP_NAMESPACE
case Result::eErrorIncompatibleDisplayKHR: throw IncompatibleDisplayKHRError( message );
case Result::eErrorValidationFailedEXT: throw ValidationFailedEXTError( message );
case Result::eErrorInvalidShaderNV: throw InvalidShaderNVError( message );
+ case Result::eErrorIncompatibleVersionKHR: throw IncompatibleVersionKHRError( message );
case Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT: throw InvalidDrmFormatModifierPlaneLayoutEXTError( message );
case Result::eErrorNotPermittedEXT: throw NotPermittedEXTError( message );
case Result::eErrorFullScreenExclusiveModeLostEXT: throw FullScreenExclusiveModeLostEXTError( message );
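For callers built with exceptions enabled, the new mapping means eErrorIncompatibleVersionKHR now surfaces as its own exception type; a sketch of handling it (someRayTracingCall is a hypothetical placeholder for any vulkan.hpp call that can return this code):

  try
  {
    someRayTracingCall();  // hypothetical helper wrapping a VK_KHR_ray_tracing call
  }
  catch ( vk::IncompatibleVersionKHRError const & err )
  {
    std::cerr << "incompatible ray-tracing version: " << err.what() << std::endl;
  }
  catch ( vk::SystemError const & err )
  {
    std::cerr << "other Vulkan error: " << err.what() << std::endl;
  }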
@@ -12537,8 +13949,92 @@ namespace VULKAN_HPP_NAMESPACE
T value;
operator std::tuple<Result&, T&>() VULKAN_HPP_NOEXCEPT { return std::tuple<Result&, T&>(result, value); }
+
+#if !defined(VULKAN_HPP_DISABLE_IMPLICIT_RESULT_VALUE_CAST)
+ VULKAN_HPP_DEPRECATED("Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue.")
+ operator T const& () const & VULKAN_HPP_NOEXCEPT
+ {
+ return value;
+ }
+
+ VULKAN_HPP_DEPRECATED("Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue.")
+ operator T& () & VULKAN_HPP_NOEXCEPT
+ {
+ return value;
+ }
+
+ VULKAN_HPP_DEPRECATED("Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue.")
+ operator T const&& () const && VULKAN_HPP_NOEXCEPT
+ {
+ return std::move( value );
+ }
+
+ VULKAN_HPP_DEPRECATED("Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue.")
+ operator T&& () && VULKAN_HPP_NOEXCEPT
+ {
+ return std::move( value );
+ }
+#endif
};
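A minimal sketch of the explicit access pattern the deprecation messages above point to; makeValue is a hypothetical helper used only for illustration:

  #include <vulkan/vulkan.hpp>

  vk::ResultValue<int> makeValue()
  {
    return vk::ResultValue<int>( vk::Result::eSuccess, 42 );  // hypothetical producer of a ResultValue
  }

  int main()
  {
    auto rv = makeValue();
    if ( rv.result == vk::Result::eSuccess )
    {
      int v = rv.value;  // explicit member access instead of the deprecated implicit cast
      (void)v;
    }
    return 0;
  }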
+#if !defined(VULKAN_HPP_NO_SMART_HANDLE)
+ template <typename Type, typename Dispatch>
+ struct ResultValue<UniqueHandle<Type,Dispatch>>
+ {
+#ifdef VULKAN_HPP_HAS_NOEXCEPT
+ ResultValue(Result r, UniqueHandle<Type, Dispatch> && v) VULKAN_HPP_NOEXCEPT
+#else
+ ResultValue(Result r, UniqueHandle<Type, Dispatch> && v)
+#endif
+ : result(r)
+ , value(std::move(v))
+ {}
+
+ std::tuple<Result, UniqueHandle<Type, Dispatch>> asTuple()
+ {
+ return std::make_tuple( result, std::move( value ) );
+ }
+
+# if !defined(VULKAN_HPP_DISABLE_IMPLICIT_RESULT_VALUE_CAST)
+ VULKAN_HPP_DEPRECATED("Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue.")
+ operator UniqueHandle<Type, Dispatch>& () & VULKAN_HPP_NOEXCEPT
+ {
+ return value;
+ }
+
+ VULKAN_HPP_DEPRECATED("Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue.")
+ operator UniqueHandle<Type, Dispatch>() VULKAN_HPP_NOEXCEPT
+ {
+ return std::move(value);
+ }
+# endif
+
+ Result result;
+ UniqueHandle<Type, Dispatch> value;
+ };
+
+ template <typename Type, typename Dispatch>
+ struct ResultValue<std::vector<UniqueHandle<Type, Dispatch>>>
+ {
+# ifdef VULKAN_HPP_HAS_NOEXCEPT
+ ResultValue( Result r, std::vector<UniqueHandle<Type, Dispatch>> && v ) VULKAN_HPP_NOEXCEPT
+# else
+ ResultValue( Result r, std::vector<UniqueHandle<Type, Dispatch>> && v )
+# endif
+ : result( r )
+ , value( std::move( v ) )
+ {}
+
+ Result result;
+ std::vector<UniqueHandle<Type, Dispatch>> value;
+
+ operator std::tuple<Result &, std::vector<UniqueHandle<Type, Dispatch>> &>() VULKAN_HPP_NOEXCEPT
+ {
+ return std::tuple<Result &, std::vector<UniqueHandle<Type, Dispatch>> &>( result, value );
+ }
+ };
+#endif
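With the specializations above, a ResultValue holding a unique handle can hand out the handle together with the result code through asTuple(); a one-line fragment using C++17 structured bindings (rv is assumed to come from one of the *Unique creation calls that return such a ResultValue):

  auto [ result, uniqueHandle ] = rv.asTuple();  // moves the UniqueHandle out of the ResultValue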
+
template <typename T>
struct ResultValueType
{
@@ -12563,7 +14059,7 @@ namespace VULKAN_HPP_NAMESPACE
{
#ifdef VULKAN_HPP_NO_EXCEPTIONS
ignore(message);
- VULKAN_HPP_ASSERT( result == Result::eSuccess );
+ VULKAN_HPP_ASSERT_ON_RESULT( result == Result::eSuccess );
return result;
#else
if ( result != Result::eSuccess )
@@ -12578,7 +14074,7 @@ namespace VULKAN_HPP_NAMESPACE
{
#ifdef VULKAN_HPP_NO_EXCEPTIONS
ignore(message);
- VULKAN_HPP_ASSERT( result == Result::eSuccess );
+ VULKAN_HPP_ASSERT_ON_RESULT( result == Result::eSuccess );
return ResultValue<T>( result, std::move( data ) );
#else
if ( result != Result::eSuccess )
@@ -12593,7 +14089,7 @@ namespace VULKAN_HPP_NAMESPACE
{
#ifdef VULKAN_HPP_NO_EXCEPTIONS
ignore(message);
- VULKAN_HPP_ASSERT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() );
+ VULKAN_HPP_ASSERT_ON_RESULT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() );
#else
if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() )
{
@@ -12608,7 +14104,7 @@ namespace VULKAN_HPP_NAMESPACE
{
#ifdef VULKAN_HPP_NO_EXCEPTIONS
ignore(message);
- VULKAN_HPP_ASSERT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() );
+ VULKAN_HPP_ASSERT_ON_RESULT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() );
#else
if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() )
{
@@ -12624,7 +14120,7 @@ namespace VULKAN_HPP_NAMESPACE
{
#ifdef VULKAN_HPP_NO_EXCEPTIONS
ignore(message);
- VULKAN_HPP_ASSERT( result == Result::eSuccess );
+ VULKAN_HPP_ASSERT_ON_RESULT( result == Result::eSuccess );
return ResultValue<UniqueHandle<T,D>>( result, UniqueHandle<T,D>(data, deleter) );
#else
if ( result != Result::eSuccess )
@@ -12634,7488 +14130,2978 @@ namespace VULKAN_HPP_NAMESPACE
return UniqueHandle<T,D>(data, deleter);
#endif
}
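Under VULKAN_HPP_NO_EXCEPTIONS the result checks above now go through VULKAN_HPP_ASSERT_ON_RESULT instead of VULKAN_HPP_ASSERT, which presumably falls back to VULKAN_HPP_ASSERT unless defined by the user; a sketch of overriding it before inclusion (myResultCheck is a hypothetical user function):

  #define VULKAN_HPP_NO_EXCEPTIONS
  #define VULKAN_HPP_ASSERT_ON_RESULT( condition ) myResultCheck( condition )
  #include <vulkan/vulkan.hpp>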
-#endif
-
- struct AccelerationStructureCreateInfoNV;
- struct AccelerationStructureInfoNV;
- struct AccelerationStructureMemoryRequirementsInfoNV;
- struct AcquireNextImageInfoKHR;
- struct AcquireProfilingLockInfoKHR;
- struct AllocationCallbacks;
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
- struct AndroidHardwareBufferFormatPropertiesANDROID;
-#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
- struct AndroidHardwareBufferPropertiesANDROID;
-#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
- struct AndroidHardwareBufferUsageANDROID;
-#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
- struct AndroidSurfaceCreateInfoKHR;
-#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
- struct ApplicationInfo;
- struct AttachmentDescription;
- struct AttachmentDescription2;
- using AttachmentDescription2KHR = AttachmentDescription2;
- struct AttachmentDescriptionStencilLayout;
- using AttachmentDescriptionStencilLayoutKHR = AttachmentDescriptionStencilLayout;
- struct AttachmentReference;
- struct AttachmentReference2;
- using AttachmentReference2KHR = AttachmentReference2;
- struct AttachmentReferenceStencilLayout;
- using AttachmentReferenceStencilLayoutKHR = AttachmentReferenceStencilLayout;
- struct AttachmentSampleLocationsEXT;
- struct BaseInStructure;
- struct BaseOutStructure;
- struct BindAccelerationStructureMemoryInfoNV;
- struct BindBufferMemoryDeviceGroupInfo;
- using BindBufferMemoryDeviceGroupInfoKHR = BindBufferMemoryDeviceGroupInfo;
- struct BindBufferMemoryInfo;
- using BindBufferMemoryInfoKHR = BindBufferMemoryInfo;
- struct BindImageMemoryDeviceGroupInfo;
- using BindImageMemoryDeviceGroupInfoKHR = BindImageMemoryDeviceGroupInfo;
- struct BindImageMemoryInfo;
- using BindImageMemoryInfoKHR = BindImageMemoryInfo;
- struct BindImageMemorySwapchainInfoKHR;
- struct BindImagePlaneMemoryInfo;
- using BindImagePlaneMemoryInfoKHR = BindImagePlaneMemoryInfo;
- struct BindSparseInfo;
- struct BufferCopy;
- struct BufferCreateInfo;
- struct BufferDeviceAddressCreateInfoEXT;
- struct BufferDeviceAddressInfo;
- using BufferDeviceAddressInfoKHR = BufferDeviceAddressInfo;
- using BufferDeviceAddressInfoEXT = BufferDeviceAddressInfo;
- struct BufferImageCopy;
- struct BufferMemoryBarrier;
- struct BufferMemoryRequirementsInfo2;
- using BufferMemoryRequirementsInfo2KHR = BufferMemoryRequirementsInfo2;
- struct BufferOpaqueCaptureAddressCreateInfo;
- using BufferOpaqueCaptureAddressCreateInfoKHR = BufferOpaqueCaptureAddressCreateInfo;
- struct BufferViewCreateInfo;
- struct CalibratedTimestampInfoEXT;
- struct CheckpointDataNV;
- struct ClearAttachment;
- union ClearColorValue;
- struct ClearDepthStencilValue;
- struct ClearRect;
- union ClearValue;
- struct CmdProcessCommandsInfoNVX;
- struct CmdReserveSpaceForCommandsInfoNVX;
- struct CoarseSampleLocationNV;
- struct CoarseSampleOrderCustomNV;
- struct CommandBufferAllocateInfo;
- struct CommandBufferBeginInfo;
- struct CommandBufferInheritanceConditionalRenderingInfoEXT;
- struct CommandBufferInheritanceInfo;
- struct CommandPoolCreateInfo;
- struct ComponentMapping;
- struct ComputePipelineCreateInfo;
- struct ConditionalRenderingBeginInfoEXT;
- struct ConformanceVersion;
- using ConformanceVersionKHR = ConformanceVersion;
- struct CooperativeMatrixPropertiesNV;
- struct CopyDescriptorSet;
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- struct D3D12FenceSubmitInfoKHR;
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- struct DebugMarkerMarkerInfoEXT;
- struct DebugMarkerObjectNameInfoEXT;
- struct DebugMarkerObjectTagInfoEXT;
- struct DebugReportCallbackCreateInfoEXT;
- struct DebugUtilsLabelEXT;
- struct DebugUtilsMessengerCallbackDataEXT;
- struct DebugUtilsMessengerCreateInfoEXT;
- struct DebugUtilsObjectNameInfoEXT;
- struct DebugUtilsObjectTagInfoEXT;
- struct DedicatedAllocationBufferCreateInfoNV;
- struct DedicatedAllocationImageCreateInfoNV;
- struct DedicatedAllocationMemoryAllocateInfoNV;
- struct DescriptorBufferInfo;
- struct DescriptorImageInfo;
- struct DescriptorPoolCreateInfo;
- struct DescriptorPoolInlineUniformBlockCreateInfoEXT;
- struct DescriptorPoolSize;
- struct DescriptorSetAllocateInfo;
- struct DescriptorSetLayoutBinding;
- struct DescriptorSetLayoutBindingFlagsCreateInfo;
- using DescriptorSetLayoutBindingFlagsCreateInfoEXT = DescriptorSetLayoutBindingFlagsCreateInfo;
- struct DescriptorSetLayoutCreateInfo;
- struct DescriptorSetLayoutSupport;
- using DescriptorSetLayoutSupportKHR = DescriptorSetLayoutSupport;
- struct DescriptorSetVariableDescriptorCountAllocateInfo;
- using DescriptorSetVariableDescriptorCountAllocateInfoEXT = DescriptorSetVariableDescriptorCountAllocateInfo;
- struct DescriptorSetVariableDescriptorCountLayoutSupport;
- using DescriptorSetVariableDescriptorCountLayoutSupportEXT = DescriptorSetVariableDescriptorCountLayoutSupport;
- struct DescriptorUpdateTemplateCreateInfo;
- using DescriptorUpdateTemplateCreateInfoKHR = DescriptorUpdateTemplateCreateInfo;
- struct DescriptorUpdateTemplateEntry;
- using DescriptorUpdateTemplateEntryKHR = DescriptorUpdateTemplateEntry;
- struct DeviceCreateInfo;
- struct DeviceEventInfoEXT;
- struct DeviceGeneratedCommandsFeaturesNVX;
- struct DeviceGeneratedCommandsLimitsNVX;
- struct DeviceGroupBindSparseInfo;
- using DeviceGroupBindSparseInfoKHR = DeviceGroupBindSparseInfo;
- struct DeviceGroupCommandBufferBeginInfo;
- using DeviceGroupCommandBufferBeginInfoKHR = DeviceGroupCommandBufferBeginInfo;
- struct DeviceGroupDeviceCreateInfo;
- using DeviceGroupDeviceCreateInfoKHR = DeviceGroupDeviceCreateInfo;
- struct DeviceGroupPresentCapabilitiesKHR;
- struct DeviceGroupPresentInfoKHR;
- struct DeviceGroupRenderPassBeginInfo;
- using DeviceGroupRenderPassBeginInfoKHR = DeviceGroupRenderPassBeginInfo;
- struct DeviceGroupSubmitInfo;
- using DeviceGroupSubmitInfoKHR = DeviceGroupSubmitInfo;
- struct DeviceGroupSwapchainCreateInfoKHR;
- struct DeviceMemoryOpaqueCaptureAddressInfo;
- using DeviceMemoryOpaqueCaptureAddressInfoKHR = DeviceMemoryOpaqueCaptureAddressInfo;
- struct DeviceMemoryOverallocationCreateInfoAMD;
- struct DeviceQueueCreateInfo;
- struct DeviceQueueGlobalPriorityCreateInfoEXT;
- struct DeviceQueueInfo2;
- struct DispatchIndirectCommand;
- struct DisplayEventInfoEXT;
- struct DisplayModeCreateInfoKHR;
- struct DisplayModeParametersKHR;
- struct DisplayModeProperties2KHR;
- struct DisplayModePropertiesKHR;
- struct DisplayNativeHdrSurfaceCapabilitiesAMD;
- struct DisplayPlaneCapabilities2KHR;
- struct DisplayPlaneCapabilitiesKHR;
- struct DisplayPlaneInfo2KHR;
- struct DisplayPlaneProperties2KHR;
- struct DisplayPlanePropertiesKHR;
- struct DisplayPowerInfoEXT;
- struct DisplayPresentInfoKHR;
- struct DisplayProperties2KHR;
- struct DisplayPropertiesKHR;
- struct DisplaySurfaceCreateInfoKHR;
- struct DrawIndexedIndirectCommand;
- struct DrawIndirectCommand;
- struct DrawMeshTasksIndirectCommandNV;
- struct DrmFormatModifierPropertiesEXT;
- struct DrmFormatModifierPropertiesListEXT;
- struct EventCreateInfo;
- struct ExportFenceCreateInfo;
- using ExportFenceCreateInfoKHR = ExportFenceCreateInfo;
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- struct ExportFenceWin32HandleInfoKHR;
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- struct ExportMemoryAllocateInfo;
- using ExportMemoryAllocateInfoKHR = ExportMemoryAllocateInfo;
- struct ExportMemoryAllocateInfoNV;
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- struct ExportMemoryWin32HandleInfoKHR;
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- struct ExportMemoryWin32HandleInfoNV;
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- struct ExportSemaphoreCreateInfo;
- using ExportSemaphoreCreateInfoKHR = ExportSemaphoreCreateInfo;
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- struct ExportSemaphoreWin32HandleInfoKHR;
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- struct ExtensionProperties;
- struct Extent2D;
- struct Extent3D;
- struct ExternalBufferProperties;
- using ExternalBufferPropertiesKHR = ExternalBufferProperties;
- struct ExternalFenceProperties;
- using ExternalFencePropertiesKHR = ExternalFenceProperties;
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
- struct ExternalFormatANDROID;
-#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
- struct ExternalImageFormatProperties;
- using ExternalImageFormatPropertiesKHR = ExternalImageFormatProperties;
- struct ExternalImageFormatPropertiesNV;
- struct ExternalMemoryBufferCreateInfo;
- using ExternalMemoryBufferCreateInfoKHR = ExternalMemoryBufferCreateInfo;
- struct ExternalMemoryImageCreateInfo;
- using ExternalMemoryImageCreateInfoKHR = ExternalMemoryImageCreateInfo;
- struct ExternalMemoryImageCreateInfoNV;
- struct ExternalMemoryProperties;
- using ExternalMemoryPropertiesKHR = ExternalMemoryProperties;
- struct ExternalSemaphoreProperties;
- using ExternalSemaphorePropertiesKHR = ExternalSemaphoreProperties;
- struct FenceCreateInfo;
- struct FenceGetFdInfoKHR;
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- struct FenceGetWin32HandleInfoKHR;
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- struct FilterCubicImageViewImageFormatPropertiesEXT;
- struct FormatProperties;
- struct FormatProperties2;
- using FormatProperties2KHR = FormatProperties2;
- struct FramebufferAttachmentImageInfo;
- using FramebufferAttachmentImageInfoKHR = FramebufferAttachmentImageInfo;
- struct FramebufferAttachmentsCreateInfo;
- using FramebufferAttachmentsCreateInfoKHR = FramebufferAttachmentsCreateInfo;
- struct FramebufferCreateInfo;
- struct FramebufferMixedSamplesCombinationNV;
- struct GeometryAABBNV;
- struct GeometryDataNV;
- struct GeometryNV;
- struct GeometryTrianglesNV;
- struct GraphicsPipelineCreateInfo;
- struct HdrMetadataEXT;
- struct HeadlessSurfaceCreateInfoEXT;
-#ifdef VK_USE_PLATFORM_IOS_MVK
- struct IOSSurfaceCreateInfoMVK;
-#endif /*VK_USE_PLATFORM_IOS_MVK*/
- struct ImageBlit;
- struct ImageCopy;
- struct ImageCreateInfo;
- struct ImageDrmFormatModifierExplicitCreateInfoEXT;
- struct ImageDrmFormatModifierListCreateInfoEXT;
- struct ImageDrmFormatModifierPropertiesEXT;
- struct ImageFormatListCreateInfo;
- using ImageFormatListCreateInfoKHR = ImageFormatListCreateInfo;
- struct ImageFormatProperties;
- struct ImageFormatProperties2;
- using ImageFormatProperties2KHR = ImageFormatProperties2;
- struct ImageMemoryBarrier;
- struct ImageMemoryRequirementsInfo2;
- using ImageMemoryRequirementsInfo2KHR = ImageMemoryRequirementsInfo2;
-#ifdef VK_USE_PLATFORM_FUCHSIA
- struct ImagePipeSurfaceCreateInfoFUCHSIA;
-#endif /*VK_USE_PLATFORM_FUCHSIA*/
- struct ImagePlaneMemoryRequirementsInfo;
- using ImagePlaneMemoryRequirementsInfoKHR = ImagePlaneMemoryRequirementsInfo;
- struct ImageResolve;
- struct ImageSparseMemoryRequirementsInfo2;
- using ImageSparseMemoryRequirementsInfo2KHR = ImageSparseMemoryRequirementsInfo2;
- struct ImageStencilUsageCreateInfo;
- using ImageStencilUsageCreateInfoEXT = ImageStencilUsageCreateInfo;
- struct ImageSubresource;
- struct ImageSubresourceLayers;
- struct ImageSubresourceRange;
- struct ImageSwapchainCreateInfoKHR;
- struct ImageViewASTCDecodeModeEXT;
- struct ImageViewCreateInfo;
- struct ImageViewHandleInfoNVX;
- struct ImageViewUsageCreateInfo;
- using ImageViewUsageCreateInfoKHR = ImageViewUsageCreateInfo;
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
- struct ImportAndroidHardwareBufferInfoANDROID;
-#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
- struct ImportFenceFdInfoKHR;
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- struct ImportFenceWin32HandleInfoKHR;
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- struct ImportMemoryFdInfoKHR;
- struct ImportMemoryHostPointerInfoEXT;
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- struct ImportMemoryWin32HandleInfoKHR;
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- struct ImportMemoryWin32HandleInfoNV;
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- struct ImportSemaphoreFdInfoKHR;
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- struct ImportSemaphoreWin32HandleInfoKHR;
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- struct IndirectCommandsLayoutCreateInfoNVX;
- struct IndirectCommandsLayoutTokenNVX;
- struct IndirectCommandsTokenNVX;
- struct InitializePerformanceApiInfoINTEL;
- struct InputAttachmentAspectReference;
- using InputAttachmentAspectReferenceKHR = InputAttachmentAspectReference;
- struct InstanceCreateInfo;
- struct LayerProperties;
-#ifdef VK_USE_PLATFORM_MACOS_MVK
- struct MacOSSurfaceCreateInfoMVK;
-#endif /*VK_USE_PLATFORM_MACOS_MVK*/
- struct MappedMemoryRange;
- struct MemoryAllocateFlagsInfo;
- using MemoryAllocateFlagsInfoKHR = MemoryAllocateFlagsInfo;
- struct MemoryAllocateInfo;
- struct MemoryBarrier;
- struct MemoryDedicatedAllocateInfo;
- using MemoryDedicatedAllocateInfoKHR = MemoryDedicatedAllocateInfo;
- struct MemoryDedicatedRequirements;
- using MemoryDedicatedRequirementsKHR = MemoryDedicatedRequirements;
- struct MemoryFdPropertiesKHR;
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
- struct MemoryGetAndroidHardwareBufferInfoANDROID;
-#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
- struct MemoryGetFdInfoKHR;
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- struct MemoryGetWin32HandleInfoKHR;
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- struct MemoryHeap;
- struct MemoryHostPointerPropertiesEXT;
- struct MemoryOpaqueCaptureAddressAllocateInfo;
- using MemoryOpaqueCaptureAddressAllocateInfoKHR = MemoryOpaqueCaptureAddressAllocateInfo;
- struct MemoryPriorityAllocateInfoEXT;
- struct MemoryRequirements;
- struct MemoryRequirements2;
- using MemoryRequirements2KHR = MemoryRequirements2;
- struct MemoryType;
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- struct MemoryWin32HandlePropertiesKHR;
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-#ifdef VK_USE_PLATFORM_METAL_EXT
- struct MetalSurfaceCreateInfoEXT;
-#endif /*VK_USE_PLATFORM_METAL_EXT*/
- struct MultisamplePropertiesEXT;
- struct ObjectTableCreateInfoNVX;
- struct ObjectTableDescriptorSetEntryNVX;
- struct ObjectTableEntryNVX;
- struct ObjectTableIndexBufferEntryNVX;
- struct ObjectTablePipelineEntryNVX;
- struct ObjectTablePushConstantEntryNVX;
- struct ObjectTableVertexBufferEntryNVX;
- struct Offset2D;
- struct Offset3D;
- struct PastPresentationTimingGOOGLE;
- struct PerformanceConfigurationAcquireInfoINTEL;
- struct PerformanceCounterDescriptionKHR;
- struct PerformanceCounterKHR;
- union PerformanceCounterResultKHR;
- struct PerformanceMarkerInfoINTEL;
- struct PerformanceOverrideInfoINTEL;
- struct PerformanceQuerySubmitInfoKHR;
- struct PerformanceStreamMarkerInfoINTEL;
- union PerformanceValueDataINTEL;
- struct PerformanceValueINTEL;
- struct PhysicalDevice16BitStorageFeatures;
- using PhysicalDevice16BitStorageFeaturesKHR = PhysicalDevice16BitStorageFeatures;
- struct PhysicalDevice8BitStorageFeatures;
- using PhysicalDevice8BitStorageFeaturesKHR = PhysicalDevice8BitStorageFeatures;
- struct PhysicalDeviceASTCDecodeFeaturesEXT;
- struct PhysicalDeviceBlendOperationAdvancedFeaturesEXT;
- struct PhysicalDeviceBlendOperationAdvancedPropertiesEXT;
- struct PhysicalDeviceBufferDeviceAddressFeatures;
- using PhysicalDeviceBufferDeviceAddressFeaturesKHR = PhysicalDeviceBufferDeviceAddressFeatures;
- struct PhysicalDeviceBufferDeviceAddressFeaturesEXT;
- using PhysicalDeviceBufferAddressFeaturesEXT = PhysicalDeviceBufferDeviceAddressFeaturesEXT;
- struct PhysicalDeviceCoherentMemoryFeaturesAMD;
- struct PhysicalDeviceComputeShaderDerivativesFeaturesNV;
- struct PhysicalDeviceConditionalRenderingFeaturesEXT;
- struct PhysicalDeviceConservativeRasterizationPropertiesEXT;
- struct PhysicalDeviceCooperativeMatrixFeaturesNV;
- struct PhysicalDeviceCooperativeMatrixPropertiesNV;
- struct PhysicalDeviceCornerSampledImageFeaturesNV;
- struct PhysicalDeviceCoverageReductionModeFeaturesNV;
- struct PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;
- struct PhysicalDeviceDepthClipEnableFeaturesEXT;
- struct PhysicalDeviceDepthStencilResolveProperties;
- using PhysicalDeviceDepthStencilResolvePropertiesKHR = PhysicalDeviceDepthStencilResolveProperties;
- struct PhysicalDeviceDescriptorIndexingFeatures;
- using PhysicalDeviceDescriptorIndexingFeaturesEXT = PhysicalDeviceDescriptorIndexingFeatures;
- struct PhysicalDeviceDescriptorIndexingProperties;
- using PhysicalDeviceDescriptorIndexingPropertiesEXT = PhysicalDeviceDescriptorIndexingProperties;
- struct PhysicalDeviceDiscardRectanglePropertiesEXT;
- struct PhysicalDeviceDriverProperties;
- using PhysicalDeviceDriverPropertiesKHR = PhysicalDeviceDriverProperties;
- struct PhysicalDeviceExclusiveScissorFeaturesNV;
- struct PhysicalDeviceExternalBufferInfo;
- using PhysicalDeviceExternalBufferInfoKHR = PhysicalDeviceExternalBufferInfo;
- struct PhysicalDeviceExternalFenceInfo;
- using PhysicalDeviceExternalFenceInfoKHR = PhysicalDeviceExternalFenceInfo;
- struct PhysicalDeviceExternalImageFormatInfo;
- using PhysicalDeviceExternalImageFormatInfoKHR = PhysicalDeviceExternalImageFormatInfo;
- struct PhysicalDeviceExternalMemoryHostPropertiesEXT;
- struct PhysicalDeviceExternalSemaphoreInfo;
- using PhysicalDeviceExternalSemaphoreInfoKHR = PhysicalDeviceExternalSemaphoreInfo;
- struct PhysicalDeviceFeatures;
- struct PhysicalDeviceFeatures2;
- using PhysicalDeviceFeatures2KHR = PhysicalDeviceFeatures2;
- struct PhysicalDeviceFloatControlsProperties;
- using PhysicalDeviceFloatControlsPropertiesKHR = PhysicalDeviceFloatControlsProperties;
- struct PhysicalDeviceFragmentDensityMapFeaturesEXT;
- struct PhysicalDeviceFragmentDensityMapPropertiesEXT;
- struct PhysicalDeviceFragmentShaderBarycentricFeaturesNV;
- struct PhysicalDeviceFragmentShaderInterlockFeaturesEXT;
- struct PhysicalDeviceGroupProperties;
- using PhysicalDeviceGroupPropertiesKHR = PhysicalDeviceGroupProperties;
- struct PhysicalDeviceHostQueryResetFeatures;
- using PhysicalDeviceHostQueryResetFeaturesEXT = PhysicalDeviceHostQueryResetFeatures;
- struct PhysicalDeviceIDProperties;
- using PhysicalDeviceIDPropertiesKHR = PhysicalDeviceIDProperties;
- struct PhysicalDeviceImageDrmFormatModifierInfoEXT;
- struct PhysicalDeviceImageFormatInfo2;
- using PhysicalDeviceImageFormatInfo2KHR = PhysicalDeviceImageFormatInfo2;
- struct PhysicalDeviceImageViewImageFormatInfoEXT;
- struct PhysicalDeviceImagelessFramebufferFeatures;
- using PhysicalDeviceImagelessFramebufferFeaturesKHR = PhysicalDeviceImagelessFramebufferFeatures;
- struct PhysicalDeviceIndexTypeUint8FeaturesEXT;
- struct PhysicalDeviceInlineUniformBlockFeaturesEXT;
- struct PhysicalDeviceInlineUniformBlockPropertiesEXT;
- struct PhysicalDeviceLimits;
- struct PhysicalDeviceLineRasterizationFeaturesEXT;
- struct PhysicalDeviceLineRasterizationPropertiesEXT;
- struct PhysicalDeviceMaintenance3Properties;
- using PhysicalDeviceMaintenance3PropertiesKHR = PhysicalDeviceMaintenance3Properties;
- struct PhysicalDeviceMemoryBudgetPropertiesEXT;
- struct PhysicalDeviceMemoryPriorityFeaturesEXT;
- struct PhysicalDeviceMemoryProperties;
- struct PhysicalDeviceMemoryProperties2;
- using PhysicalDeviceMemoryProperties2KHR = PhysicalDeviceMemoryProperties2;
- struct PhysicalDeviceMeshShaderFeaturesNV;
- struct PhysicalDeviceMeshShaderPropertiesNV;
- struct PhysicalDeviceMultiviewFeatures;
- using PhysicalDeviceMultiviewFeaturesKHR = PhysicalDeviceMultiviewFeatures;
- struct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
- struct PhysicalDeviceMultiviewProperties;
- using PhysicalDeviceMultiviewPropertiesKHR = PhysicalDeviceMultiviewProperties;
- struct PhysicalDevicePCIBusInfoPropertiesEXT;
- struct PhysicalDevicePerformanceQueryFeaturesKHR;
- struct PhysicalDevicePerformanceQueryPropertiesKHR;
- struct PhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
- struct PhysicalDevicePointClippingProperties;
- using PhysicalDevicePointClippingPropertiesKHR = PhysicalDevicePointClippingProperties;
- struct PhysicalDeviceProperties;
- struct PhysicalDeviceProperties2;
- using PhysicalDeviceProperties2KHR = PhysicalDeviceProperties2;
- struct PhysicalDeviceProtectedMemoryFeatures;
- struct PhysicalDeviceProtectedMemoryProperties;
- struct PhysicalDevicePushDescriptorPropertiesKHR;
- struct PhysicalDeviceRayTracingPropertiesNV;
- struct PhysicalDeviceRepresentativeFragmentTestFeaturesNV;
- struct PhysicalDeviceSampleLocationsPropertiesEXT;
- struct PhysicalDeviceSamplerFilterMinmaxProperties;
- using PhysicalDeviceSamplerFilterMinmaxPropertiesEXT = PhysicalDeviceSamplerFilterMinmaxProperties;
- struct PhysicalDeviceSamplerYcbcrConversionFeatures;
- using PhysicalDeviceSamplerYcbcrConversionFeaturesKHR = PhysicalDeviceSamplerYcbcrConversionFeatures;
- struct PhysicalDeviceScalarBlockLayoutFeatures;
- using PhysicalDeviceScalarBlockLayoutFeaturesEXT = PhysicalDeviceScalarBlockLayoutFeatures;
- struct PhysicalDeviceSeparateDepthStencilLayoutsFeatures;
- using PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR = PhysicalDeviceSeparateDepthStencilLayoutsFeatures;
- struct PhysicalDeviceShaderAtomicInt64Features;
- using PhysicalDeviceShaderAtomicInt64FeaturesKHR = PhysicalDeviceShaderAtomicInt64Features;
- struct PhysicalDeviceShaderClockFeaturesKHR;
- struct PhysicalDeviceShaderCoreProperties2AMD;
- struct PhysicalDeviceShaderCorePropertiesAMD;
- struct PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT;
- struct PhysicalDeviceShaderDrawParametersFeatures;
- using PhysicalDeviceShaderDrawParameterFeatures = PhysicalDeviceShaderDrawParametersFeatures;
- struct PhysicalDeviceShaderFloat16Int8Features;
- using PhysicalDeviceShaderFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features;
- using PhysicalDeviceFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features;
- struct PhysicalDeviceShaderImageFootprintFeaturesNV;
- struct PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
- struct PhysicalDeviceShaderSMBuiltinsFeaturesNV;
- struct PhysicalDeviceShaderSMBuiltinsPropertiesNV;
- struct PhysicalDeviceShaderSubgroupExtendedTypesFeatures;
- using PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = PhysicalDeviceShaderSubgroupExtendedTypesFeatures;
- struct PhysicalDeviceShadingRateImageFeaturesNV;
- struct PhysicalDeviceShadingRateImagePropertiesNV;
- struct PhysicalDeviceSparseImageFormatInfo2;
- using PhysicalDeviceSparseImageFormatInfo2KHR = PhysicalDeviceSparseImageFormatInfo2;
- struct PhysicalDeviceSparseProperties;
- struct PhysicalDeviceSubgroupProperties;
- struct PhysicalDeviceSubgroupSizeControlFeaturesEXT;
- struct PhysicalDeviceSubgroupSizeControlPropertiesEXT;
- struct PhysicalDeviceSurfaceInfo2KHR;
- struct PhysicalDeviceTexelBufferAlignmentFeaturesEXT;
- struct PhysicalDeviceTexelBufferAlignmentPropertiesEXT;
- struct PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT;
- struct PhysicalDeviceTimelineSemaphoreFeatures;
- using PhysicalDeviceTimelineSemaphoreFeaturesKHR = PhysicalDeviceTimelineSemaphoreFeatures;
- struct PhysicalDeviceTimelineSemaphoreProperties;
- using PhysicalDeviceTimelineSemaphorePropertiesKHR = PhysicalDeviceTimelineSemaphoreProperties;
- struct PhysicalDeviceToolPropertiesEXT;
- struct PhysicalDeviceTransformFeedbackFeaturesEXT;
- struct PhysicalDeviceTransformFeedbackPropertiesEXT;
- struct PhysicalDeviceUniformBufferStandardLayoutFeatures;
- using PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR = PhysicalDeviceUniformBufferStandardLayoutFeatures;
- struct PhysicalDeviceVariablePointersFeatures;
- using PhysicalDeviceVariablePointersFeaturesKHR = PhysicalDeviceVariablePointersFeatures;
- using PhysicalDeviceVariablePointerFeaturesKHR = PhysicalDeviceVariablePointersFeatures;
- using PhysicalDeviceVariablePointerFeatures = PhysicalDeviceVariablePointersFeatures;
- struct PhysicalDeviceVertexAttributeDivisorFeaturesEXT;
- struct PhysicalDeviceVertexAttributeDivisorPropertiesEXT;
- struct PhysicalDeviceVulkan11Features;
- struct PhysicalDeviceVulkan11Properties;
- struct PhysicalDeviceVulkan12Features;
- struct PhysicalDeviceVulkan12Properties;
- struct PhysicalDeviceVulkanMemoryModelFeatures;
- using PhysicalDeviceVulkanMemoryModelFeaturesKHR = PhysicalDeviceVulkanMemoryModelFeatures;
- struct PhysicalDeviceYcbcrImageArraysFeaturesEXT;
- struct PipelineCacheCreateInfo;
- struct PipelineColorBlendAdvancedStateCreateInfoEXT;
- struct PipelineColorBlendAttachmentState;
- struct PipelineColorBlendStateCreateInfo;
- struct PipelineCompilerControlCreateInfoAMD;
- struct PipelineCoverageModulationStateCreateInfoNV;
- struct PipelineCoverageReductionStateCreateInfoNV;
- struct PipelineCoverageToColorStateCreateInfoNV;
- struct PipelineCreationFeedbackCreateInfoEXT;
- struct PipelineCreationFeedbackEXT;
- struct PipelineDepthStencilStateCreateInfo;
- struct PipelineDiscardRectangleStateCreateInfoEXT;
- struct PipelineDynamicStateCreateInfo;
- struct PipelineExecutableInfoKHR;
- struct PipelineExecutableInternalRepresentationKHR;
- struct PipelineExecutablePropertiesKHR;
- struct PipelineExecutableStatisticKHR;
- union PipelineExecutableStatisticValueKHR;
- struct PipelineInfoKHR;
- struct PipelineInputAssemblyStateCreateInfo;
- struct PipelineLayoutCreateInfo;
- struct PipelineMultisampleStateCreateInfo;
- struct PipelineRasterizationConservativeStateCreateInfoEXT;
- struct PipelineRasterizationDepthClipStateCreateInfoEXT;
- struct PipelineRasterizationLineStateCreateInfoEXT;
- struct PipelineRasterizationStateCreateInfo;
- struct PipelineRasterizationStateRasterizationOrderAMD;
- struct PipelineRasterizationStateStreamCreateInfoEXT;
- struct PipelineRepresentativeFragmentTestStateCreateInfoNV;
- struct PipelineSampleLocationsStateCreateInfoEXT;
- struct PipelineShaderStageCreateInfo;
- struct PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT;
- struct PipelineTessellationDomainOriginStateCreateInfo;
- using PipelineTessellationDomainOriginStateCreateInfoKHR = PipelineTessellationDomainOriginStateCreateInfo;
- struct PipelineTessellationStateCreateInfo;
- struct PipelineVertexInputDivisorStateCreateInfoEXT;
- struct PipelineVertexInputStateCreateInfo;
- struct PipelineViewportCoarseSampleOrderStateCreateInfoNV;
- struct PipelineViewportExclusiveScissorStateCreateInfoNV;
- struct PipelineViewportShadingRateImageStateCreateInfoNV;
- struct PipelineViewportStateCreateInfo;
- struct PipelineViewportSwizzleStateCreateInfoNV;
- struct PipelineViewportWScalingStateCreateInfoNV;
-#ifdef VK_USE_PLATFORM_GGP
- struct PresentFrameTokenGGP;
-#endif /*VK_USE_PLATFORM_GGP*/
- struct PresentInfoKHR;
- struct PresentRegionKHR;
- struct PresentRegionsKHR;
- struct PresentTimeGOOGLE;
- struct PresentTimesInfoGOOGLE;
- struct ProtectedSubmitInfo;
- struct PushConstantRange;
- struct QueryPoolCreateInfo;
- struct QueryPoolCreateInfoINTEL;
- struct QueryPoolPerformanceCreateInfoKHR;
- struct QueueFamilyCheckpointPropertiesNV;
- struct QueueFamilyProperties;
- struct QueueFamilyProperties2;
- using QueueFamilyProperties2KHR = QueueFamilyProperties2;
- struct RayTracingPipelineCreateInfoNV;
- struct RayTracingShaderGroupCreateInfoNV;
- struct Rect2D;
- struct RectLayerKHR;
- struct RefreshCycleDurationGOOGLE;
- struct RenderPassAttachmentBeginInfo;
- using RenderPassAttachmentBeginInfoKHR = RenderPassAttachmentBeginInfo;
- struct RenderPassBeginInfo;
- struct RenderPassCreateInfo;
- struct RenderPassCreateInfo2;
- using RenderPassCreateInfo2KHR = RenderPassCreateInfo2;
- struct RenderPassFragmentDensityMapCreateInfoEXT;
- struct RenderPassInputAttachmentAspectCreateInfo;
- using RenderPassInputAttachmentAspectCreateInfoKHR = RenderPassInputAttachmentAspectCreateInfo;
- struct RenderPassMultiviewCreateInfo;
- using RenderPassMultiviewCreateInfoKHR = RenderPassMultiviewCreateInfo;
- struct RenderPassSampleLocationsBeginInfoEXT;
- struct SampleLocationEXT;
- struct SampleLocationsInfoEXT;
- struct SamplerCreateInfo;
- struct SamplerReductionModeCreateInfo;
- using SamplerReductionModeCreateInfoEXT = SamplerReductionModeCreateInfo;
- struct SamplerYcbcrConversionCreateInfo;
- using SamplerYcbcrConversionCreateInfoKHR = SamplerYcbcrConversionCreateInfo;
- struct SamplerYcbcrConversionImageFormatProperties;
- using SamplerYcbcrConversionImageFormatPropertiesKHR = SamplerYcbcrConversionImageFormatProperties;
- struct SamplerYcbcrConversionInfo;
- using SamplerYcbcrConversionInfoKHR = SamplerYcbcrConversionInfo;
- struct SemaphoreCreateInfo;
- struct SemaphoreGetFdInfoKHR;
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- struct SemaphoreGetWin32HandleInfoKHR;
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- struct SemaphoreSignalInfo;
- using SemaphoreSignalInfoKHR = SemaphoreSignalInfo;
- struct SemaphoreTypeCreateInfo;
- using SemaphoreTypeCreateInfoKHR = SemaphoreTypeCreateInfo;
- struct SemaphoreWaitInfo;
- using SemaphoreWaitInfoKHR = SemaphoreWaitInfo;
- struct ShaderModuleCreateInfo;
- struct ShaderModuleValidationCacheCreateInfoEXT;
- struct ShaderResourceUsageAMD;
- struct ShaderStatisticsInfoAMD;
- struct ShadingRatePaletteNV;
- struct SharedPresentSurfaceCapabilitiesKHR;
- struct SparseBufferMemoryBindInfo;
- struct SparseImageFormatProperties;
- struct SparseImageFormatProperties2;
- using SparseImageFormatProperties2KHR = SparseImageFormatProperties2;
- struct SparseImageMemoryBind;
- struct SparseImageMemoryBindInfo;
- struct SparseImageMemoryRequirements;
- struct SparseImageMemoryRequirements2;
- using SparseImageMemoryRequirements2KHR = SparseImageMemoryRequirements2;
- struct SparseImageOpaqueMemoryBindInfo;
- struct SparseMemoryBind;
- struct SpecializationInfo;
- struct SpecializationMapEntry;
- struct StencilOpState;
-#ifdef VK_USE_PLATFORM_GGP
- struct StreamDescriptorSurfaceCreateInfoGGP;
-#endif /*VK_USE_PLATFORM_GGP*/
- struct SubmitInfo;
- struct SubpassBeginInfo;
- using SubpassBeginInfoKHR = SubpassBeginInfo;
- struct SubpassDependency;
- struct SubpassDependency2;
- using SubpassDependency2KHR = SubpassDependency2;
- struct SubpassDescription;
- struct SubpassDescription2;
- using SubpassDescription2KHR = SubpassDescription2;
- struct SubpassDescriptionDepthStencilResolve;
- using SubpassDescriptionDepthStencilResolveKHR = SubpassDescriptionDepthStencilResolve;
- struct SubpassEndInfo;
- using SubpassEndInfoKHR = SubpassEndInfo;
- struct SubpassSampleLocationsEXT;
- struct SubresourceLayout;
- struct SurfaceCapabilities2EXT;
- struct SurfaceCapabilities2KHR;
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- struct SurfaceCapabilitiesFullScreenExclusiveEXT;
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- struct SurfaceCapabilitiesKHR;
- struct SurfaceFormat2KHR;
- struct SurfaceFormatKHR;
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- struct SurfaceFullScreenExclusiveInfoEXT;
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- struct SurfaceFullScreenExclusiveWin32InfoEXT;
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- struct SurfaceProtectedCapabilitiesKHR;
- struct SwapchainCounterCreateInfoEXT;
- struct SwapchainCreateInfoKHR;
- struct SwapchainDisplayNativeHdrCreateInfoAMD;
- struct TextureLODGatherFormatPropertiesAMD;
- struct TimelineSemaphoreSubmitInfo;
- using TimelineSemaphoreSubmitInfoKHR = TimelineSemaphoreSubmitInfo;
- struct ValidationCacheCreateInfoEXT;
- struct ValidationFeaturesEXT;
- struct ValidationFlagsEXT;
- struct VertexInputAttributeDescription;
- struct VertexInputBindingDescription;
- struct VertexInputBindingDivisorDescriptionEXT;
-#ifdef VK_USE_PLATFORM_VI_NN
- struct ViSurfaceCreateInfoNN;
-#endif /*VK_USE_PLATFORM_VI_NN*/
- struct Viewport;
- struct ViewportSwizzleNV;
- struct ViewportWScalingNV;
-#ifdef VK_USE_PLATFORM_WAYLAND_KHR
- struct WaylandSurfaceCreateInfoKHR;
-#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- struct Win32KeyedMutexAcquireReleaseInfoKHR;
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- struct Win32KeyedMutexAcquireReleaseInfoNV;
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- struct Win32SurfaceCreateInfoKHR;
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- struct WriteDescriptorSet;
- struct WriteDescriptorSetAccelerationStructureNV;
- struct WriteDescriptorSetInlineUniformBlockEXT;
- struct XYColorEXT;
-#ifdef VK_USE_PLATFORM_XCB_KHR
- struct XcbSurfaceCreateInfoKHR;
-#endif /*VK_USE_PLATFORM_XCB_KHR*/
-#ifdef VK_USE_PLATFORM_XLIB_KHR
- struct XlibSurfaceCreateInfoKHR;
-#endif /*VK_USE_PLATFORM_XLIB_KHR*/
- class SurfaceKHR
- {
- public:
- using CType = VkSurfaceKHR;
-
- static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eSurfaceKHR;
-
- public:
- VULKAN_HPP_CONSTEXPR SurfaceKHR() VULKAN_HPP_NOEXCEPT
- : m_surfaceKHR(VK_NULL_HANDLE)
- {}
-
- VULKAN_HPP_CONSTEXPR SurfaceKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- : m_surfaceKHR(VK_NULL_HANDLE)
- {}
-
- VULKAN_HPP_TYPESAFE_EXPLICIT SurfaceKHR( VkSurfaceKHR surfaceKHR ) VULKAN_HPP_NOEXCEPT
- : m_surfaceKHR( surfaceKHR )
- {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- SurfaceKHR & operator=(VkSurfaceKHR surfaceKHR) VULKAN_HPP_NOEXCEPT
- {
- m_surfaceKHR = surfaceKHR;
- return *this;
- }
-#endif
-
- SurfaceKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- {
- m_surfaceKHR = VK_NULL_HANDLE;
- return *this;
- }
-
- bool operator==( SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_surfaceKHR == rhs.m_surfaceKHR;
- }
-
- bool operator!=(SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_surfaceKHR != rhs.m_surfaceKHR;
- }
-
- bool operator<(SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_surfaceKHR < rhs.m_surfaceKHR;
- }
-
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSurfaceKHR() const VULKAN_HPP_NOEXCEPT
+ template <typename T, typename D>
+ VULKAN_HPP_INLINE ResultValue<UniqueHandle<T, D>>
+ createResultValue( Result result,
+ T & data,
+ char const * message,
+ std::initializer_list<Result> successCodes,
+ typename UniqueHandleTraits<T, D>::deleter const & deleter )
+ {
+# ifdef VULKAN_HPP_NO_EXCEPTIONS
+ ignore( message );
+ VULKAN_HPP_ASSERT_ON_RESULT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() );
+# else
+ if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() )
{
- return m_surfaceKHR;
+ throwResultException( result, message );
}
+# endif
+ return ResultValue<UniqueHandle<T, D>>( result, UniqueHandle<T, D>( data, deleter ) );
+ }
- explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ template <typename T, typename D>
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<T, D>>>::type
+ createResultValue( Result result, std::vector<UniqueHandle<T, D>> && data, char const * message )
+ {
+# ifdef VULKAN_HPP_NO_EXCEPTIONS
+ ignore( message );
+ VULKAN_HPP_ASSERT_ON_RESULT( result == Result::eSuccess );
+ return ResultValue<std::vector<UniqueHandle<T, D>>>( result, std::move( data ) );
+# else
+ if ( result != Result::eSuccess )
{
- return m_surfaceKHR != VK_NULL_HANDLE;
+ throwResultException( result, message );
}
+ return std::move( data );
+# endif
+ }
- bool operator!() const VULKAN_HPP_NOEXCEPT
+ template <typename T, typename D>
+ VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<T, D>>>
+ createResultValue( Result result,
+ std::vector<UniqueHandle<T, D>> && data,
+ char const * message,
+ std::initializer_list<Result> successCodes )
+ {
+# ifdef VULKAN_HPP_NO_EXCEPTIONS
+ ignore( message );
+ VULKAN_HPP_ASSERT_ON_RESULT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() );
+# else
+ if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() )
{
- return m_surfaceKHR == VK_NULL_HANDLE;
+ throwResultException( result, message );
}
+# endif
+ return ResultValue<std::vector<UniqueHandle<T, D>>>( result, std::move( data ) );
+ }
+#endif
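The overloads above are the internal plumbing behind the *Unique creation functions in enhanced mode. A minimal usage sketch, assuming a valid vk::Device named device, exceptions enabled, and smart handles enabled (VULKAN_HPP_NO_SMART_HANDLE not defined):

    // createFenceUnique() funnels the raw result and handle through createResultValue(),
    // which throws on failure and wraps the fence in a UniqueHandle that destroys it on scope exit.
    vk::UniqueFence fence = device.createFenceUnique( vk::FenceCreateInfo{} );
    // With VULKAN_HPP_NO_EXCEPTIONS defined, the same helpers assert instead of throwing and
    // return a ResultValue carrying both the vk::Result and the (possibly null) handle.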
- private:
- VkSurfaceKHR m_surfaceKHR;
- };
- static_assert( sizeof( SurfaceKHR ) == sizeof( VkSurfaceKHR ), "handle and wrapper have different size!" );
-
- template <>
- struct cpp_type<ObjectType::eSurfaceKHR>
- {
- using type = SurfaceKHR;
- };
-
- class DebugReportCallbackEXT
+ struct AabbPositionsKHR
{
- public:
- using CType = VkDebugReportCallbackEXT;
- static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDebugReportCallbackEXT;
- public:
- VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT() VULKAN_HPP_NOEXCEPT
- : m_debugReportCallbackEXT(VK_NULL_HANDLE)
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR AabbPositionsKHR(float minX_ = {}, float minY_ = {}, float minZ_ = {}, float maxX_ = {}, float maxY_ = {}, float maxZ_ = {}) VULKAN_HPP_NOEXCEPT
+ : minX( minX_ ), minY( minY_ ), minZ( minZ_ ), maxX( maxX_ ), maxY( maxY_ ), maxZ( maxZ_ )
{}
- VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- : m_debugReportCallbackEXT(VK_NULL_HANDLE)
- {}
+ VULKAN_HPP_CONSTEXPR AabbPositionsKHR( AabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VULKAN_HPP_TYPESAFE_EXPLICIT DebugReportCallbackEXT( VkDebugReportCallbackEXT debugReportCallbackEXT ) VULKAN_HPP_NOEXCEPT
- : m_debugReportCallbackEXT( debugReportCallbackEXT )
- {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- DebugReportCallbackEXT & operator=(VkDebugReportCallbackEXT debugReportCallbackEXT) VULKAN_HPP_NOEXCEPT
+ AabbPositionsKHR( VkAabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- m_debugReportCallbackEXT = debugReportCallbackEXT;
- return *this;
+ *this = rhs;
}
-#endif
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- DebugReportCallbackEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ AabbPositionsKHR & operator=( VkAabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- m_debugReportCallbackEXT = VK_NULL_HANDLE;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AabbPositionsKHR const *>( &rhs );
return *this;
}
- bool operator==( DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ AabbPositionsKHR & operator=( AabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- return m_debugReportCallbackEXT == rhs.m_debugReportCallbackEXT;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( AabbPositionsKHR ) );
+ return *this;
}
- bool operator!=(DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ AabbPositionsKHR & setMinX( float minX_ ) VULKAN_HPP_NOEXCEPT
{
- return m_debugReportCallbackEXT != rhs.m_debugReportCallbackEXT;
+ minX = minX_;
+ return *this;
}
- bool operator<(DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ AabbPositionsKHR & setMinY( float minY_ ) VULKAN_HPP_NOEXCEPT
{
- return m_debugReportCallbackEXT < rhs.m_debugReportCallbackEXT;
+ minY = minY_;
+ return *this;
}
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDebugReportCallbackEXT() const VULKAN_HPP_NOEXCEPT
+ AabbPositionsKHR & setMinZ( float minZ_ ) VULKAN_HPP_NOEXCEPT
{
- return m_debugReportCallbackEXT;
+ minZ = minZ_;
+ return *this;
}
- explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ AabbPositionsKHR & setMaxX( float maxX_ ) VULKAN_HPP_NOEXCEPT
{
- return m_debugReportCallbackEXT != VK_NULL_HANDLE;
+ maxX = maxX_;
+ return *this;
}
- bool operator!() const VULKAN_HPP_NOEXCEPT
+ AabbPositionsKHR & setMaxY( float maxY_ ) VULKAN_HPP_NOEXCEPT
{
- return m_debugReportCallbackEXT == VK_NULL_HANDLE;
- }
-
- private:
- VkDebugReportCallbackEXT m_debugReportCallbackEXT;
- };
- static_assert( sizeof( DebugReportCallbackEXT ) == sizeof( VkDebugReportCallbackEXT ), "handle and wrapper have different size!" );
-
- template <>
- struct cpp_type<ObjectType::eDebugReportCallbackEXT>
- {
- using type = DebugReportCallbackEXT;
- };
-
- class DebugUtilsMessengerEXT
- {
- public:
- using CType = VkDebugUtilsMessengerEXT;
-
- static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDebugUtilsMessengerEXT;
-
- public:
- VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT() VULKAN_HPP_NOEXCEPT
- : m_debugUtilsMessengerEXT(VK_NULL_HANDLE)
- {}
-
- VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- : m_debugUtilsMessengerEXT(VK_NULL_HANDLE)
- {}
-
- VULKAN_HPP_TYPESAFE_EXPLICIT DebugUtilsMessengerEXT( VkDebugUtilsMessengerEXT debugUtilsMessengerEXT ) VULKAN_HPP_NOEXCEPT
- : m_debugUtilsMessengerEXT( debugUtilsMessengerEXT )
- {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- DebugUtilsMessengerEXT & operator=(VkDebugUtilsMessengerEXT debugUtilsMessengerEXT) VULKAN_HPP_NOEXCEPT
- {
- m_debugUtilsMessengerEXT = debugUtilsMessengerEXT;
+ maxY = maxY_;
return *this;
}
-#endif
- DebugUtilsMessengerEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ AabbPositionsKHR & setMaxZ( float maxZ_ ) VULKAN_HPP_NOEXCEPT
{
- m_debugUtilsMessengerEXT = VK_NULL_HANDLE;
+ maxZ = maxZ_;
return *this;
}
- bool operator==( DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_debugUtilsMessengerEXT == rhs.m_debugUtilsMessengerEXT;
- }
- bool operator!=(DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ operator VkAabbPositionsKHR const&() const VULKAN_HPP_NOEXCEPT
{
- return m_debugUtilsMessengerEXT != rhs.m_debugUtilsMessengerEXT;
+ return *reinterpret_cast<const VkAabbPositionsKHR*>( this );
}
- bool operator<(DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ operator VkAabbPositionsKHR &() VULKAN_HPP_NOEXCEPT
{
- return m_debugUtilsMessengerEXT < rhs.m_debugUtilsMessengerEXT;
+ return *reinterpret_cast<VkAabbPositionsKHR*>( this );
}
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDebugUtilsMessengerEXT() const VULKAN_HPP_NOEXCEPT
- {
- return m_debugUtilsMessengerEXT;
- }
- explicit operator bool() const VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( AabbPositionsKHR const& ) const = default;
+#else
+ bool operator==( AabbPositionsKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- return m_debugUtilsMessengerEXT != VK_NULL_HANDLE;
+ return ( minX == rhs.minX )
+ && ( minY == rhs.minY )
+ && ( minZ == rhs.minZ )
+ && ( maxX == rhs.maxX )
+ && ( maxY == rhs.maxY )
+ && ( maxZ == rhs.maxZ );
}
- bool operator!() const VULKAN_HPP_NOEXCEPT
+ bool operator!=( AabbPositionsKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- return m_debugUtilsMessengerEXT == VK_NULL_HANDLE;
+ return !operator==( rhs );
}
+#endif
- private:
- VkDebugUtilsMessengerEXT m_debugUtilsMessengerEXT;
- };
- static_assert( sizeof( DebugUtilsMessengerEXT ) == sizeof( VkDebugUtilsMessengerEXT ), "handle and wrapper have different size!" );
- template <>
- struct cpp_type<ObjectType::eDebugUtilsMessengerEXT>
- {
- using type = DebugUtilsMessengerEXT;
+
+ public:
+ float minX = {};
+ float minY = {};
+ float minZ = {};
+ float maxX = {};
+ float maxY = {};
+ float maxZ = {};
+
};
+ static_assert( sizeof( AabbPositionsKHR ) == sizeof( VkAabbPositionsKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<AabbPositionsKHR>::value, "struct wrapper is not a standard layout!" );
+ using AabbPositionsNV = AabbPositionsKHR;
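AabbPositionsKHR is a plain wrapper over VkAabbPositionsKHR with chainable setters; a minimal sketch of filling one, assuming vulkan.hpp is included and vk is the VULKAN_HPP_NAMESPACE alias:

    // The setters return *this, so an axis-aligned box can be described in a single expression.
    vk::AabbPositionsKHR aabb = vk::AabbPositionsKHR{}
                                    .setMinX( -1.0f ).setMinY( -1.0f ).setMinZ( -1.0f )
                                    .setMaxX(  1.0f ).setMaxY(  1.0f ).setMaxZ(  1.0f );
    // The conversion operators let the wrapper be passed wherever the C struct is expected.
    VkAabbPositionsKHR const & raw = aabb;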
- class DisplayKHR
+ class AccelerationStructureKHR
{
public:
- using CType = VkDisplayKHR;
+ using CType = VkAccelerationStructureKHR;
- static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDisplayKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureKHR;
public:
- VULKAN_HPP_CONSTEXPR DisplayKHR() VULKAN_HPP_NOEXCEPT
- : m_displayKHR(VK_NULL_HANDLE)
+ VULKAN_HPP_CONSTEXPR AccelerationStructureKHR() VULKAN_HPP_NOEXCEPT
+ : m_accelerationStructureKHR(VK_NULL_HANDLE)
{}
- VULKAN_HPP_CONSTEXPR DisplayKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- : m_displayKHR(VK_NULL_HANDLE)
+ VULKAN_HPP_CONSTEXPR AccelerationStructureKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_accelerationStructureKHR(VK_NULL_HANDLE)
{}
- VULKAN_HPP_TYPESAFE_EXPLICIT DisplayKHR( VkDisplayKHR displayKHR ) VULKAN_HPP_NOEXCEPT
- : m_displayKHR( displayKHR )
+ VULKAN_HPP_TYPESAFE_EXPLICIT AccelerationStructureKHR( VkAccelerationStructureKHR accelerationStructureKHR ) VULKAN_HPP_NOEXCEPT
+ : m_accelerationStructureKHR( accelerationStructureKHR )
{}
#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- DisplayKHR & operator=(VkDisplayKHR displayKHR) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureKHR & operator=(VkAccelerationStructureKHR accelerationStructureKHR) VULKAN_HPP_NOEXCEPT
{
- m_displayKHR = displayKHR;
+ m_accelerationStructureKHR = accelerationStructureKHR;
return *this;
}
#endif
- DisplayKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
{
- m_displayKHR = VK_NULL_HANDLE;
+ m_accelerationStructureKHR = VK_NULL_HANDLE;
return *this;
}
- bool operator==( DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( AccelerationStructureKHR const& ) const = default;
+#else
+ bool operator==( AccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- return m_displayKHR == rhs.m_displayKHR;
+ return m_accelerationStructureKHR == rhs.m_accelerationStructureKHR;
}
- bool operator!=(DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=(AccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- return m_displayKHR != rhs.m_displayKHR;
+ return m_accelerationStructureKHR != rhs.m_accelerationStructureKHR;
}
- bool operator<(DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator<(AccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- return m_displayKHR < rhs.m_displayKHR;
+ return m_accelerationStructureKHR < rhs.m_accelerationStructureKHR;
}
+#endif
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDisplayKHR() const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkAccelerationStructureKHR() const VULKAN_HPP_NOEXCEPT
{
- return m_displayKHR;
+ return m_accelerationStructureKHR;
}
explicit operator bool() const VULKAN_HPP_NOEXCEPT
{
- return m_displayKHR != VK_NULL_HANDLE;
+ return m_accelerationStructureKHR != VK_NULL_HANDLE;
}
bool operator!() const VULKAN_HPP_NOEXCEPT
{
- return m_displayKHR == VK_NULL_HANDLE;
+ return m_accelerationStructureKHR == VK_NULL_HANDLE;
}
private:
- VkDisplayKHR m_displayKHR;
+ VkAccelerationStructureKHR m_accelerationStructureKHR;
};
- static_assert( sizeof( DisplayKHR ) == sizeof( VkDisplayKHR ), "handle and wrapper have different size!" );
+ static_assert( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR ) == sizeof( VkAccelerationStructureKHR ), "handle and wrapper have different size!" );
template <>
- struct cpp_type<ObjectType::eDisplayKHR>
+ struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eAccelerationStructureKHR>
{
- using type = DisplayKHR;
+ using type = VULKAN_HPP_NAMESPACE::AccelerationStructureKHR;
};
- class SwapchainKHR
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureKHR>
{
- public:
- using CType = VkSwapchainKHR;
+ using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureKHR;
+ };
- static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eSwapchainKHR;
- public:
- VULKAN_HPP_CONSTEXPR SwapchainKHR() VULKAN_HPP_NOEXCEPT
- : m_swapchainKHR(VK_NULL_HANDLE)
- {}
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureKHR>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureKHR;
+ };
- VULKAN_HPP_CONSTEXPR SwapchainKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- : m_swapchainKHR(VK_NULL_HANDLE)
- {}
- VULKAN_HPP_TYPESAFE_EXPLICIT SwapchainKHR( VkSwapchainKHR swapchainKHR ) VULKAN_HPP_NOEXCEPT
- : m_swapchainKHR( swapchainKHR )
- {}
+ template <>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+ };
+ using AccelerationStructureNV = AccelerationStructureKHR;
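The handle wrapper carries no state beyond the raw VkAccelerationStructureKHR, as the static_assert above checks; a short sketch of the null-handle semantics:

    vk::AccelerationStructureKHR accel;             // default-constructed handle is VK_NULL_HANDLE
    bool isValid = static_cast<bool>( accel );      // explicit operator bool, false here
    accel = nullptr;                                // handles can be reset to the null handle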
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- SwapchainKHR & operator=(VkSwapchainKHR swapchainKHR) VULKAN_HPP_NOEXCEPT
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ union DeviceOrHostAddressConstKHR
+ {
+ DeviceOrHostAddressConstKHR( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const& rhs ) VULKAN_HPP_NOEXCEPT
{
- m_swapchainKHR = swapchainKHR;
- return *this;
+ memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR ) );
}
-#endif
- SwapchainKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- {
- m_swapchainKHR = VK_NULL_HANDLE;
- return *this;
- }
+ DeviceOrHostAddressConstKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} )
+ : deviceAddress( deviceAddress_ )
+ {}
- bool operator==( SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_swapchainKHR == rhs.m_swapchainKHR;
- }
+ DeviceOrHostAddressConstKHR( const void* hostAddress_ )
+ : hostAddress( hostAddress_ )
+ {}
- bool operator!=(SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ DeviceOrHostAddressConstKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
{
- return m_swapchainKHR != rhs.m_swapchainKHR;
+ deviceAddress = deviceAddress_;
+ return *this;
}
- bool operator<(SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ DeviceOrHostAddressConstKHR & setHostAddress( const void* hostAddress_ ) VULKAN_HPP_NOEXCEPT
{
- return m_swapchainKHR < rhs.m_swapchainKHR;
+ hostAddress = hostAddress_;
+ return *this;
}
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSwapchainKHR() const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR & operator=( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- return m_swapchainKHR;
+ memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR ) );
+ return *this;
}
- explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ operator VkDeviceOrHostAddressConstKHR const&() const
{
- return m_swapchainKHR != VK_NULL_HANDLE;
+ return *reinterpret_cast<const VkDeviceOrHostAddressConstKHR*>(this);
}
- bool operator!() const VULKAN_HPP_NOEXCEPT
+ operator VkDeviceOrHostAddressConstKHR &()
{
- return m_swapchainKHR == VK_NULL_HANDLE;
+ return *reinterpret_cast<VkDeviceOrHostAddressConstKHR*>(this);
}
- private:
- VkSwapchainKHR m_swapchainKHR;
- };
- static_assert( sizeof( SwapchainKHR ) == sizeof( VkSwapchainKHR ), "handle and wrapper have different size!" );
-
- template <>
- struct cpp_type<ObjectType::eSwapchainKHR>
- {
- using type = SwapchainKHR;
+#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+ VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress;
+ const void* hostAddress;
+#else
+ VkDeviceAddress deviceAddress;
+ const void* hostAddress;
+#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
};
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
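DeviceOrHostAddressConstKHR lets the same field reference either device memory (by VkDeviceAddress) or host memory (by pointer); a minimal sketch, assuming VK_ENABLE_BETA_EXTENSIONS is defined and hostVertices is a populated std::vector<float>:

    // The const void* constructor selects the hostAddress member of the union.
    vk::DeviceOrHostAddressConstKHR address( hostVertices.data() );
    // For device-local data, the deviceAddress member is set instead, typically from a
    // value queried via vkGetBufferDeviceAddressKHR:
    // address.setDeviceAddress( bufferDeviceAddress );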
- class Semaphore
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ struct AccelerationStructureGeometryTrianglesDataKHR
{
- public:
- using CType = VkSemaphore;
-
- static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eSemaphore;
-
- public:
- VULKAN_HPP_CONSTEXPR Semaphore() VULKAN_HPP_NOEXCEPT
- : m_semaphore(VK_NULL_HANDLE)
- {}
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryTrianglesDataKHR;
- VULKAN_HPP_CONSTEXPR Semaphore( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- : m_semaphore(VK_NULL_HANDLE)
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ AccelerationStructureGeometryTrianglesDataKHR(VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ = {}, VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR transformData_ = {}) VULKAN_HPP_NOEXCEPT
+ : vertexFormat( vertexFormat_ ), vertexData( vertexData_ ), vertexStride( vertexStride_ ), indexType( indexType_ ), indexData( indexData_ ), transformData( transformData_ )
{}
- VULKAN_HPP_TYPESAFE_EXPLICIT Semaphore( VkSemaphore semaphore ) VULKAN_HPP_NOEXCEPT
- : m_semaphore( semaphore )
- {}
+ AccelerationStructureGeometryTrianglesDataKHR( AccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- Semaphore & operator=(VkSemaphore semaphore) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureGeometryTrianglesDataKHR( VkAccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- m_semaphore = semaphore;
- return *this;
+ *this = rhs;
}
-#endif
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- Semaphore & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureGeometryTrianglesDataKHR & operator=( VkAccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- m_semaphore = VK_NULL_HANDLE;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR const *>( &rhs );
return *this;
}
- bool operator==( Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureGeometryTrianglesDataKHR & operator=( AccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- return m_semaphore == rhs.m_semaphore;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( AccelerationStructureGeometryTrianglesDataKHR ) );
+ return *this;
}
- bool operator!=(Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureGeometryTrianglesDataKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- return m_semaphore != rhs.m_semaphore;
+ pNext = pNext_;
+ return *this;
}
- bool operator<(Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureGeometryTrianglesDataKHR & setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT
{
- return m_semaphore < rhs.m_semaphore;
+ vertexFormat = vertexFormat_;
+ return *this;
}
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSemaphore() const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureGeometryTrianglesDataKHR & setVertexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & vertexData_ ) VULKAN_HPP_NOEXCEPT
{
- return m_semaphore;
+ vertexData = vertexData_;
+ return *this;
}
- explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureGeometryTrianglesDataKHR & setVertexStride( VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ ) VULKAN_HPP_NOEXCEPT
{
- return m_semaphore != VK_NULL_HANDLE;
+ vertexStride = vertexStride_;
+ return *this;
}
- bool operator!() const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureGeometryTrianglesDataKHR & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
{
- return m_semaphore == VK_NULL_HANDLE;
+ indexType = indexType_;
+ return *this;
}
- private:
- VkSemaphore m_semaphore;
- };
- static_assert( sizeof( Semaphore ) == sizeof( VkSemaphore ), "handle and wrapper have different size!" );
-
- template <>
- struct cpp_type<ObjectType::eSemaphore>
- {
- using type = Semaphore;
- };
-
- class Fence
- {
- public:
- using CType = VkFence;
-
- static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eFence;
-
- public:
- VULKAN_HPP_CONSTEXPR Fence() VULKAN_HPP_NOEXCEPT
- : m_fence(VK_NULL_HANDLE)
- {}
-
- VULKAN_HPP_CONSTEXPR Fence( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- : m_fence(VK_NULL_HANDLE)
- {}
-
- VULKAN_HPP_TYPESAFE_EXPLICIT Fence( VkFence fence ) VULKAN_HPP_NOEXCEPT
- : m_fence( fence )
- {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- Fence & operator=(VkFence fence) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureGeometryTrianglesDataKHR & setIndexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & indexData_ ) VULKAN_HPP_NOEXCEPT
{
- m_fence = fence;
+ indexData = indexData_;
return *this;
}
-#endif
- Fence & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureGeometryTrianglesDataKHR & setTransformData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & transformData_ ) VULKAN_HPP_NOEXCEPT
{
- m_fence = VK_NULL_HANDLE;
+ transformData = transformData_;
return *this;
}
- bool operator==( Fence const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_fence == rhs.m_fence;
- }
- bool operator!=(Fence const & rhs ) const VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureGeometryTrianglesDataKHR const&() const VULKAN_HPP_NOEXCEPT
{
- return m_fence != rhs.m_fence;
+ return *reinterpret_cast<const VkAccelerationStructureGeometryTrianglesDataKHR*>( this );
}
- bool operator<(Fence const & rhs ) const VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureGeometryTrianglesDataKHR &() VULKAN_HPP_NOEXCEPT
{
- return m_fence < rhs.m_fence;
+ return *reinterpret_cast<VkAccelerationStructureGeometryTrianglesDataKHR*>( this );
}
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkFence() const VULKAN_HPP_NOEXCEPT
- {
- return m_fence;
- }
- explicit operator bool() const VULKAN_HPP_NOEXCEPT
- {
- return m_fence != VK_NULL_HANDLE;
- }
- bool operator!() const VULKAN_HPP_NOEXCEPT
- {
- return m_fence == VK_NULL_HANDLE;
- }
- private:
- VkFence m_fence;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryTrianglesDataKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize vertexStride = {};
+ VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16;
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData = {};
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR transformData = {};
+
};
- static_assert( sizeof( Fence ) == sizeof( VkFence ), "handle and wrapper have different size!" );
+ static_assert( sizeof( AccelerationStructureGeometryTrianglesDataKHR ) == sizeof( VkAccelerationStructureGeometryTrianglesDataKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<AccelerationStructureGeometryTrianglesDataKHR>::value, "struct wrapper is not a standard layout!" );
template <>
- struct cpp_type<ObjectType::eFence>
+ struct CppType<StructureType, StructureType::eAccelerationStructureGeometryTrianglesDataKHR>
{
- using type = Fence;
+ using Type = AccelerationStructureGeometryTrianglesDataKHR;
};
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
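A minimal sketch of filling the triangle geometry description, assuming vertexAddress and indexAddress are DeviceOrHostAddressConstKHR values prepared as above and the positions are tightly packed 3-component floats:

    vk::AccelerationStructureGeometryTrianglesDataKHR triangles;
    triangles.setVertexFormat( vk::Format::eR32G32B32Sfloat )
             .setVertexData( vertexAddress )
             .setVertexStride( 3 * sizeof( float ) )
             .setIndexType( vk::IndexType::eUint32 )
             .setIndexData( indexAddress );
    // sType is fixed to eAccelerationStructureGeometryTrianglesDataKHR by the wrapper itself.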
- class PerformanceConfigurationINTEL
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ struct AccelerationStructureGeometryAabbsDataKHR
{
- public:
- using CType = VkPerformanceConfigurationINTEL;
-
- static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::ePerformanceConfigurationINTEL;
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryAabbsDataKHR;
- public:
- VULKAN_HPP_CONSTEXPR PerformanceConfigurationINTEL() VULKAN_HPP_NOEXCEPT
- : m_performanceConfigurationINTEL(VK_NULL_HANDLE)
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ AccelerationStructureGeometryAabbsDataKHR(VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize stride_ = {}) VULKAN_HPP_NOEXCEPT
+ : data( data_ ), stride( stride_ )
{}
- VULKAN_HPP_CONSTEXPR PerformanceConfigurationINTEL( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- : m_performanceConfigurationINTEL(VK_NULL_HANDLE)
- {}
+ AccelerationStructureGeometryAabbsDataKHR( AccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VULKAN_HPP_TYPESAFE_EXPLICIT PerformanceConfigurationINTEL( VkPerformanceConfigurationINTEL performanceConfigurationINTEL ) VULKAN_HPP_NOEXCEPT
- : m_performanceConfigurationINTEL( performanceConfigurationINTEL )
- {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- PerformanceConfigurationINTEL & operator=(VkPerformanceConfigurationINTEL performanceConfigurationINTEL) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureGeometryAabbsDataKHR( VkAccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- m_performanceConfigurationINTEL = performanceConfigurationINTEL;
- return *this;
+ *this = rhs;
}
-#endif
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PerformanceConfigurationINTEL & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureGeometryAabbsDataKHR & operator=( VkAccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- m_performanceConfigurationINTEL = VK_NULL_HANDLE;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR const *>( &rhs );
return *this;
}
- bool operator==( PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureGeometryAabbsDataKHR & operator=( AccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- return m_performanceConfigurationINTEL == rhs.m_performanceConfigurationINTEL;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( AccelerationStructureGeometryAabbsDataKHR ) );
+ return *this;
}
- bool operator!=(PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureGeometryAabbsDataKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- return m_performanceConfigurationINTEL != rhs.m_performanceConfigurationINTEL;
+ pNext = pNext_;
+ return *this;
}
- bool operator<(PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureGeometryAabbsDataKHR & setData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & data_ ) VULKAN_HPP_NOEXCEPT
{
- return m_performanceConfigurationINTEL < rhs.m_performanceConfigurationINTEL;
+ data = data_;
+ return *this;
}
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPerformanceConfigurationINTEL() const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureGeometryAabbsDataKHR & setStride( VULKAN_HPP_NAMESPACE::DeviceSize stride_ ) VULKAN_HPP_NOEXCEPT
{
- return m_performanceConfigurationINTEL;
+ stride = stride_;
+ return *this;
}
- explicit operator bool() const VULKAN_HPP_NOEXCEPT
+
+ operator VkAccelerationStructureGeometryAabbsDataKHR const&() const VULKAN_HPP_NOEXCEPT
{
- return m_performanceConfigurationINTEL != VK_NULL_HANDLE;
+ return *reinterpret_cast<const VkAccelerationStructureGeometryAabbsDataKHR*>( this );
}
- bool operator!() const VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureGeometryAabbsDataKHR &() VULKAN_HPP_NOEXCEPT
{
- return m_performanceConfigurationINTEL == VK_NULL_HANDLE;
+ return *reinterpret_cast<VkAccelerationStructureGeometryAabbsDataKHR*>( this );
}
- private:
- VkPerformanceConfigurationINTEL m_performanceConfigurationINTEL;
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryAabbsDataKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize stride = {};
+
};
- static_assert( sizeof( PerformanceConfigurationINTEL ) == sizeof( VkPerformanceConfigurationINTEL ), "handle and wrapper have different size!" );
+ static_assert( sizeof( AccelerationStructureGeometryAabbsDataKHR ) == sizeof( VkAccelerationStructureGeometryAabbsDataKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<AccelerationStructureGeometryAabbsDataKHR>::value, "struct wrapper is not a standard layout!" );
template <>
- struct cpp_type<ObjectType::ePerformanceConfigurationINTEL>
+ struct CppType<StructureType, StructureType::eAccelerationStructureGeometryAabbsDataKHR>
{
- using type = PerformanceConfigurationINTEL;
+ using Type = AccelerationStructureGeometryAabbsDataKHR;
};
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
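// Illustrative usage sketch, not part of the upstream header: the AABB variant, using the
// fluent setters defined above. The buffer address is a placeholder, and the stride assumes
// six floats (min/max xyz) per AABB entry; DeviceOrHostAddressConstKHR is assumed to mirror
// the members of the C union it wraps.
static vk::AccelerationStructureGeometryAabbsDataKHR
makeAabbData( vk::DeviceAddress aabbBufferAddress )
{
  vk::DeviceOrHostAddressConstKHR aabbAddress;   // member names mirror VkDeviceOrHostAddressConstKHR
  aabbAddress.deviceAddress = aabbBufferAddress;

  vk::AccelerationStructureGeometryAabbsDataKHR aabbs;
  aabbs.setData( aabbAddress )
       .setStride( 6 * sizeof( float ) );
  return aabbs;
}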
- class QueryPool
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ struct AccelerationStructureGeometryInstancesDataKHR
{
- public:
- using CType = VkQueryPool;
-
- static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eQueryPool;
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryInstancesDataKHR;
- public:
- VULKAN_HPP_CONSTEXPR QueryPool() VULKAN_HPP_NOEXCEPT
- : m_queryPool(VK_NULL_HANDLE)
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ AccelerationStructureGeometryInstancesDataKHR(VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {}) VULKAN_HPP_NOEXCEPT
+ : arrayOfPointers( arrayOfPointers_ ), data( data_ )
{}
- VULKAN_HPP_CONSTEXPR QueryPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- : m_queryPool(VK_NULL_HANDLE)
- {}
-
- VULKAN_HPP_TYPESAFE_EXPLICIT QueryPool( VkQueryPool queryPool ) VULKAN_HPP_NOEXCEPT
- : m_queryPool( queryPool )
- {}
+ AccelerationStructureGeometryInstancesDataKHR( AccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- QueryPool & operator=(VkQueryPool queryPool) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureGeometryInstancesDataKHR( VkAccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- m_queryPool = queryPool;
- return *this;
+ *this = rhs;
}
-#endif
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- QueryPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureGeometryInstancesDataKHR & operator=( VkAccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- m_queryPool = VK_NULL_HANDLE;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR const *>( &rhs );
return *this;
}
- bool operator==( QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureGeometryInstancesDataKHR & operator=( AccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- return m_queryPool == rhs.m_queryPool;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( AccelerationStructureGeometryInstancesDataKHR ) );
+ return *this;
}
- bool operator!=(QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureGeometryInstancesDataKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- return m_queryPool != rhs.m_queryPool;
+ pNext = pNext_;
+ return *this;
}
- bool operator<(QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureGeometryInstancesDataKHR & setArrayOfPointers( VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers_ ) VULKAN_HPP_NOEXCEPT
{
- return m_queryPool < rhs.m_queryPool;
+ arrayOfPointers = arrayOfPointers_;
+ return *this;
}
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueryPool() const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureGeometryInstancesDataKHR & setData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & data_ ) VULKAN_HPP_NOEXCEPT
{
- return m_queryPool;
+ data = data_;
+ return *this;
}
- explicit operator bool() const VULKAN_HPP_NOEXCEPT
+
+ operator VkAccelerationStructureGeometryInstancesDataKHR const&() const VULKAN_HPP_NOEXCEPT
{
- return m_queryPool != VK_NULL_HANDLE;
+ return *reinterpret_cast<const VkAccelerationStructureGeometryInstancesDataKHR*>( this );
}
- bool operator!() const VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureGeometryInstancesDataKHR &() VULKAN_HPP_NOEXCEPT
{
- return m_queryPool == VK_NULL_HANDLE;
+ return *reinterpret_cast<VkAccelerationStructureGeometryInstancesDataKHR*>( this );
}
- private:
- VkQueryPool m_queryPool;
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryInstancesDataKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers = {};
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data = {};
+
};
- static_assert( sizeof( QueryPool ) == sizeof( VkQueryPool ), "handle and wrapper have different size!" );
+ static_assert( sizeof( AccelerationStructureGeometryInstancesDataKHR ) == sizeof( VkAccelerationStructureGeometryInstancesDataKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<AccelerationStructureGeometryInstancesDataKHR>::value, "struct wrapper is not a standard layout!" );
template <>
- struct cpp_type<ObjectType::eQueryPool>
+ struct CppType<StructureType, StructureType::eAccelerationStructureGeometryInstancesDataKHR>
{
- using type = QueryPool;
+ using Type = AccelerationStructureGeometryInstancesDataKHR;
};
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
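// Illustrative usage sketch, not part of the upstream header: instance data for a top-level
// build that reads a tightly packed instance buffer. The buffer address is a placeholder, and
// DeviceOrHostAddressConstKHR is again assumed to mirror the C union's member names.
static vk::AccelerationStructureGeometryInstancesDataKHR
makeInstanceData( vk::DeviceAddress instanceBufferAddress )
{
  vk::DeviceOrHostAddressConstKHR instanceAddress;  // member names mirror VkDeviceOrHostAddressConstKHR
  instanceAddress.deviceAddress = instanceBufferAddress;

  vk::AccelerationStructureGeometryInstancesDataKHR instances;
  instances.setArrayOfPointers( VK_FALSE )          // data points at an array of structs, not pointers
           .setData( instanceAddress );
  return instances;
}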
- class Buffer
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ union AccelerationStructureGeometryDataKHR
{
- public:
- using CType = VkBuffer;
-
- static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eBuffer;
+ AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR ) );
+ }
- public:
- VULKAN_HPP_CONSTEXPR Buffer() VULKAN_HPP_NOEXCEPT
- : m_buffer(VK_NULL_HANDLE)
+ AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR triangles_ = {} )
+ : triangles( triangles_ )
{}
- VULKAN_HPP_CONSTEXPR Buffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- : m_buffer(VK_NULL_HANDLE)
+ AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR aabbs_ )
+ : aabbs( aabbs_ )
{}
- VULKAN_HPP_TYPESAFE_EXPLICIT Buffer( VkBuffer buffer ) VULKAN_HPP_NOEXCEPT
- : m_buffer( buffer )
+ AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR instances_ )
+ : instances( instances_ )
{}
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- Buffer & operator=(VkBuffer buffer) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureGeometryDataKHR & setTriangles( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR const & triangles_ ) VULKAN_HPP_NOEXCEPT
{
- m_buffer = buffer;
+ triangles = triangles_;
return *this;
}
-#endif
- Buffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureGeometryDataKHR & setAabbs( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR const & aabbs_ ) VULKAN_HPP_NOEXCEPT
{
- m_buffer = VK_NULL_HANDLE;
+ aabbs = aabbs_;
return *this;
}
- bool operator==( Buffer const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_buffer == rhs.m_buffer;
- }
-
- bool operator!=(Buffer const & rhs ) const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureGeometryDataKHR & setInstances( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR const & instances_ ) VULKAN_HPP_NOEXCEPT
{
- return m_buffer != rhs.m_buffer;
- }
-
- bool operator<(Buffer const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_buffer < rhs.m_buffer;
+ instances = instances_;
+ return *this;
}
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBuffer() const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR & operator=( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- return m_buffer;
+ memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR ) );
+ return *this;
}
- explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureGeometryDataKHR const&() const
{
- return m_buffer != VK_NULL_HANDLE;
+ return *reinterpret_cast<const VkAccelerationStructureGeometryDataKHR*>(this);
}
- bool operator!() const VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureGeometryDataKHR &()
{
- return m_buffer == VK_NULL_HANDLE;
+ return *reinterpret_cast<VkAccelerationStructureGeometryDataKHR*>(this);
}
- private:
- VkBuffer m_buffer;
- };
- static_assert( sizeof( Buffer ) == sizeof( VkBuffer ), "handle and wrapper have different size!" );
-
- template <>
- struct cpp_type<ObjectType::eBuffer>
- {
- using type = Buffer;
+#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+ VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR triangles;
+ VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR aabbs;
+ VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR instances;
+#else
+ VkAccelerationStructureGeometryTrianglesDataKHR triangles;
+ VkAccelerationStructureGeometryAabbsDataKHR aabbs;
+ VkAccelerationStructureGeometryInstancesDataKHR instances;
+#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
};
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- class PipelineLayout
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ struct AccelerationStructureGeometryKHR
{
- public:
- using CType = VkPipelineLayout;
-
- static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::ePipelineLayout;
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryKHR;
- public:
- VULKAN_HPP_CONSTEXPR PipelineLayout() VULKAN_HPP_NOEXCEPT
- : m_pipelineLayout(VK_NULL_HANDLE)
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ AccelerationStructureGeometryKHR(VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles, VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR geometry_ = {}, VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ = {}) VULKAN_HPP_NOEXCEPT
+ : geometryType( geometryType_ ), geometry( geometry_ ), flags( flags_ )
{}
- VULKAN_HPP_CONSTEXPR PipelineLayout( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- : m_pipelineLayout(VK_NULL_HANDLE)
- {}
+ AccelerationStructureGeometryKHR( AccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VULKAN_HPP_TYPESAFE_EXPLICIT PipelineLayout( VkPipelineLayout pipelineLayout ) VULKAN_HPP_NOEXCEPT
- : m_pipelineLayout( pipelineLayout )
- {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- PipelineLayout & operator=(VkPipelineLayout pipelineLayout) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureGeometryKHR( VkAccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- m_pipelineLayout = pipelineLayout;
- return *this;
+ *this = rhs;
}
-#endif
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PipelineLayout & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureGeometryKHR & operator=( VkAccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- m_pipelineLayout = VK_NULL_HANDLE;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR const *>( &rhs );
return *this;
}
- bool operator==( PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_pipelineLayout == rhs.m_pipelineLayout;
- }
-
- bool operator!=(PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureGeometryKHR & operator=( AccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- return m_pipelineLayout != rhs.m_pipelineLayout;
- }
-
- bool operator<(PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_pipelineLayout < rhs.m_pipelineLayout;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( AccelerationStructureGeometryKHR ) );
+ return *this;
}
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineLayout() const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureGeometryKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- return m_pipelineLayout;
+ pNext = pNext_;
+ return *this;
}
- explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureGeometryKHR & setGeometryType( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ ) VULKAN_HPP_NOEXCEPT
{
- return m_pipelineLayout != VK_NULL_HANDLE;
+ geometryType = geometryType_;
+ return *this;
}
- bool operator!() const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureGeometryKHR & setGeometry( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR const & geometry_ ) VULKAN_HPP_NOEXCEPT
{
- return m_pipelineLayout == VK_NULL_HANDLE;
- }
-
- private:
- VkPipelineLayout m_pipelineLayout;
- };
- static_assert( sizeof( PipelineLayout ) == sizeof( VkPipelineLayout ), "handle and wrapper have different size!" );
-
- template <>
- struct cpp_type<ObjectType::ePipelineLayout>
- {
- using type = PipelineLayout;
- };
-
- class DescriptorSet
- {
- public:
- using CType = VkDescriptorSet;
-
- static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDescriptorSet;
-
- public:
- VULKAN_HPP_CONSTEXPR DescriptorSet() VULKAN_HPP_NOEXCEPT
- : m_descriptorSet(VK_NULL_HANDLE)
- {}
-
- VULKAN_HPP_CONSTEXPR DescriptorSet( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- : m_descriptorSet(VK_NULL_HANDLE)
- {}
-
- VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSet( VkDescriptorSet descriptorSet ) VULKAN_HPP_NOEXCEPT
- : m_descriptorSet( descriptorSet )
- {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- DescriptorSet & operator=(VkDescriptorSet descriptorSet) VULKAN_HPP_NOEXCEPT
- {
- m_descriptorSet = descriptorSet;
+ geometry = geometry_;
return *this;
}
-#endif
- DescriptorSet & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureGeometryKHR & setFlags( VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
- m_descriptorSet = VK_NULL_HANDLE;
+ flags = flags_;
return *this;
}
- bool operator==( DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_descriptorSet == rhs.m_descriptorSet;
- }
- bool operator!=(DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureGeometryKHR const&() const VULKAN_HPP_NOEXCEPT
{
- return m_descriptorSet != rhs.m_descriptorSet;
+ return *reinterpret_cast<const VkAccelerationStructureGeometryKHR*>( this );
}
- bool operator<(DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureGeometryKHR &() VULKAN_HPP_NOEXCEPT
{
- return m_descriptorSet < rhs.m_descriptorSet;
+ return *reinterpret_cast<VkAccelerationStructureGeometryKHR*>( this );
}
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSet() const VULKAN_HPP_NOEXCEPT
- {
- return m_descriptorSet;
- }
- explicit operator bool() const VULKAN_HPP_NOEXCEPT
- {
- return m_descriptorSet != VK_NULL_HANDLE;
- }
- bool operator!() const VULKAN_HPP_NOEXCEPT
- {
- return m_descriptorSet == VK_NULL_HANDLE;
- }
- private:
- VkDescriptorSet m_descriptorSet;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles;
+ VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR geometry = {};
+ VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags = {};
+
};
- static_assert( sizeof( DescriptorSet ) == sizeof( VkDescriptorSet ), "handle and wrapper have different size!" );
+ static_assert( sizeof( AccelerationStructureGeometryKHR ) == sizeof( VkAccelerationStructureGeometryKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<AccelerationStructureGeometryKHR>::value, "struct wrapper is not a standard layout!" );
template <>
- struct cpp_type<ObjectType::eDescriptorSet>
+ struct CppType<StructureType, StructureType::eAccelerationStructureGeometryKHR>
{
- using type = DescriptorSet;
+ using Type = AccelerationStructureGeometryKHR;
};
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
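// Illustrative usage sketch, not part of the upstream header: combining the geometry union with
// the AccelerationStructureGeometryKHR wrapper defined above. makeTriangleData() is the
// hypothetical helper from the earlier sketch, and the eOpaque flag is an illustrative choice.
static vk::AccelerationStructureGeometryKHR
makeTriangleGeometry( vk::DeviceAddress vertexBufferAddress, vk::DeviceAddress indexBufferAddress )
{
  vk::AccelerationStructureGeometryKHR geometry;
  geometry.geometryType = vk::GeometryTypeKHR::eTriangles;
  // The union constructor taking a triangles struct selects its `triangles` member.
  geometry.geometry     = vk::AccelerationStructureGeometryDataKHR(
                              makeTriangleData( vertexBufferAddress, indexBufferAddress ) );
  geometry.flags        = vk::GeometryFlagBitsKHR::eOpaque;
  return geometry;
}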
- class Pipeline
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ union DeviceOrHostAddressKHR
{
- public:
- using CType = VkPipeline;
-
- static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::ePipeline;
-
- public:
- VULKAN_HPP_CONSTEXPR Pipeline() VULKAN_HPP_NOEXCEPT
- : m_pipeline(VK_NULL_HANDLE)
- {}
+ DeviceOrHostAddressKHR( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR ) );
+ }
- VULKAN_HPP_CONSTEXPR Pipeline( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- : m_pipeline(VK_NULL_HANDLE)
+ DeviceOrHostAddressKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} )
+ : deviceAddress( deviceAddress_ )
{}
- VULKAN_HPP_TYPESAFE_EXPLICIT Pipeline( VkPipeline pipeline ) VULKAN_HPP_NOEXCEPT
- : m_pipeline( pipeline )
+ DeviceOrHostAddressKHR( void* hostAddress_ )
+ : hostAddress( hostAddress_ )
{}
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- Pipeline & operator=(VkPipeline pipeline) VULKAN_HPP_NOEXCEPT
+ DeviceOrHostAddressKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
{
- m_pipeline = pipeline;
+ deviceAddress = deviceAddress_;
return *this;
}
-#endif
- Pipeline & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ DeviceOrHostAddressKHR & setHostAddress( void* hostAddress_ ) VULKAN_HPP_NOEXCEPT
{
- m_pipeline = VK_NULL_HANDLE;
+ hostAddress = hostAddress_;
return *this;
}
- bool operator==( Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR & operator=( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- return m_pipeline == rhs.m_pipeline;
- }
-
- bool operator!=(Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_pipeline != rhs.m_pipeline;
- }
-
- bool operator<(Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_pipeline < rhs.m_pipeline;
+ memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR ) );
+ return *this;
}
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipeline() const VULKAN_HPP_NOEXCEPT
+ operator VkDeviceOrHostAddressKHR const&() const
{
- return m_pipeline;
+ return *reinterpret_cast<const VkDeviceOrHostAddressKHR*>(this);
}
- explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ operator VkDeviceOrHostAddressKHR &()
{
- return m_pipeline != VK_NULL_HANDLE;
+ return *reinterpret_cast<VkDeviceOrHostAddressKHR*>(this);
}
- bool operator!() const VULKAN_HPP_NOEXCEPT
- {
- return m_pipeline == VK_NULL_HANDLE;
- }
-
- private:
- VkPipeline m_pipeline;
+#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+ VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress;
+ void* hostAddress;
+#else
+ VkDeviceAddress deviceAddress;
+ void* hostAddress;
+#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
};
- static_assert( sizeof( Pipeline ) == sizeof( VkPipeline ), "handle and wrapper have different size!" );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
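// Illustrative usage sketch, not part of the upstream header: DeviceOrHostAddressKHR selects
// between a GPU address and a host pointer, depending on where the build runs. Both scratch
// arguments are placeholders supplied by the caller.
static vk::DeviceOrHostAddressKHR
selectScratch( bool buildOnHost, void* hostScratch, vk::DeviceAddress deviceScratch )
{
  return buildOnHost ? vk::DeviceOrHostAddressKHR( hostScratch )     // host builds take a pointer
                     : vk::DeviceOrHostAddressKHR( deviceScratch );  // device builds take a VkDeviceAddress
}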
- template <>
- struct cpp_type<ObjectType::ePipeline>
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ struct AccelerationStructureBuildGeometryInfoKHR
{
- using type = Pipeline;
- };
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureBuildGeometryInfoKHR;
- class ImageView
- {
- public:
- using CType = VkImageView;
-
- static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eImageView;
-
- public:
- VULKAN_HPP_CONSTEXPR ImageView() VULKAN_HPP_NOEXCEPT
- : m_imageView(VK_NULL_HANDLE)
- {}
-
- VULKAN_HPP_CONSTEXPR ImageView( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- : m_imageView(VK_NULL_HANDLE)
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ AccelerationStructureBuildGeometryInfoKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::Bool32 update_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_ = {}, VULKAN_HPP_NAMESPACE::Bool32 geometryArrayOfPointers_ = {}, uint32_t geometryCount_ = {}, const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR* const * ppGeometries_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData_ = {}) VULKAN_HPP_NOEXCEPT
+ : type( type_ ), flags( flags_ ), update( update_ ), srcAccelerationStructure( srcAccelerationStructure_ ), dstAccelerationStructure( dstAccelerationStructure_ ), geometryArrayOfPointers( geometryArrayOfPointers_ ), geometryCount( geometryCount_ ), ppGeometries( ppGeometries_ ), scratchData( scratchData_ )
{}
- VULKAN_HPP_TYPESAFE_EXPLICIT ImageView( VkImageView imageView ) VULKAN_HPP_NOEXCEPT
- : m_imageView( imageView )
- {}
+ AccelerationStructureBuildGeometryInfoKHR( AccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- ImageView & operator=(VkImageView imageView) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureBuildGeometryInfoKHR( VkAccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- m_imageView = imageView;
- return *this;
+ *this = rhs;
}
-#endif
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ImageView & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureBuildGeometryInfoKHR & operator=( VkAccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- m_imageView = VK_NULL_HANDLE;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR const *>( &rhs );
return *this;
}
- bool operator==( ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureBuildGeometryInfoKHR & operator=( AccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- return m_imageView == rhs.m_imageView;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( AccelerationStructureBuildGeometryInfoKHR ) );
+ return *this;
}
- bool operator!=(ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureBuildGeometryInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- return m_imageView != rhs.m_imageView;
+ pNext = pNext_;
+ return *this;
}
- bool operator<(ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureBuildGeometryInfoKHR & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ ) VULKAN_HPP_NOEXCEPT
{
- return m_imageView < rhs.m_imageView;
+ type = type_;
+ return *this;
}
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImageView() const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureBuildGeometryInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
- return m_imageView;
+ flags = flags_;
+ return *this;
}
- explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureBuildGeometryInfoKHR & setUpdate( VULKAN_HPP_NAMESPACE::Bool32 update_ ) VULKAN_HPP_NOEXCEPT
{
- return m_imageView != VK_NULL_HANDLE;
+ update = update_;
+ return *this;
}
- bool operator!() const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureBuildGeometryInfoKHR & setSrcAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT
{
- return m_imageView == VK_NULL_HANDLE;
+ srcAccelerationStructure = srcAccelerationStructure_;
+ return *this;
}
- private:
- VkImageView m_imageView;
- };
- static_assert( sizeof( ImageView ) == sizeof( VkImageView ), "handle and wrapper have different size!" );
-
- template <>
- struct cpp_type<ObjectType::eImageView>
- {
- using type = ImageView;
- };
-
- class Image
- {
- public:
- using CType = VkImage;
-
- static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eImage;
-
- public:
- VULKAN_HPP_CONSTEXPR Image() VULKAN_HPP_NOEXCEPT
- : m_image(VK_NULL_HANDLE)
- {}
-
- VULKAN_HPP_CONSTEXPR Image( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- : m_image(VK_NULL_HANDLE)
- {}
-
- VULKAN_HPP_TYPESAFE_EXPLICIT Image( VkImage image ) VULKAN_HPP_NOEXCEPT
- : m_image( image )
- {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- Image & operator=(VkImage image) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureBuildGeometryInfoKHR & setDstAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT
{
- m_image = image;
+ dstAccelerationStructure = dstAccelerationStructure_;
return *this;
}
-#endif
- Image & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureBuildGeometryInfoKHR & setGeometryArrayOfPointers( VULKAN_HPP_NAMESPACE::Bool32 geometryArrayOfPointers_ ) VULKAN_HPP_NOEXCEPT
{
- m_image = VK_NULL_HANDLE;
+ geometryArrayOfPointers = geometryArrayOfPointers_;
return *this;
}
- bool operator==( Image const & rhs ) const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureBuildGeometryInfoKHR & setGeometryCount( uint32_t geometryCount_ ) VULKAN_HPP_NOEXCEPT
{
- return m_image == rhs.m_image;
+ geometryCount = geometryCount_;
+ return *this;
}
- bool operator!=(Image const & rhs ) const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureBuildGeometryInfoKHR & setPpGeometries( const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR* const * ppGeometries_ ) VULKAN_HPP_NOEXCEPT
{
- return m_image != rhs.m_image;
+ ppGeometries = ppGeometries_;
+ return *this;
}
- bool operator<(Image const & rhs ) const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureBuildGeometryInfoKHR & setScratchData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & scratchData_ ) VULKAN_HPP_NOEXCEPT
{
- return m_image < rhs.m_image;
+ scratchData = scratchData_;
+ return *this;
}
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImage() const VULKAN_HPP_NOEXCEPT
- {
- return m_image;
- }
- explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureBuildGeometryInfoKHR const&() const VULKAN_HPP_NOEXCEPT
{
- return m_image != VK_NULL_HANDLE;
+ return *reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR*>( this );
}
- bool operator!() const VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureBuildGeometryInfoKHR &() VULKAN_HPP_NOEXCEPT
{
- return m_image == VK_NULL_HANDLE;
+ return *reinterpret_cast<VkAccelerationStructureBuildGeometryInfoKHR*>( this );
}
- private:
- VkImage m_image;
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureBuildGeometryInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel;
+ VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags = {};
+ VULKAN_HPP_NAMESPACE::Bool32 update = {};
+ VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure = {};
+ VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure = {};
+ VULKAN_HPP_NAMESPACE::Bool32 geometryArrayOfPointers = {};
+ uint32_t geometryCount = {};
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR* const * ppGeometries = {};
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData = {};
+
};
- static_assert( sizeof( Image ) == sizeof( VkImage ), "handle and wrapper have different size!" );
+ static_assert( sizeof( AccelerationStructureBuildGeometryInfoKHR ) == sizeof( VkAccelerationStructureBuildGeometryInfoKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<AccelerationStructureBuildGeometryInfoKHR>::value, "struct wrapper is not a standard layout!" );
template <>
- struct cpp_type<ObjectType::eImage>
+ struct CppType<StructureType, StructureType::eAccelerationStructureBuildGeometryInfoKHR>
{
- using type = Image;
+ using Type = AccelerationStructureBuildGeometryInfoKHR;
};
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
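// Illustrative usage sketch, not part of the upstream header: a single-geometry bottom-level
// build description using the setters defined above. The destination handle, scratch address
// and the eBottomLevel / ePreferFastTrace enum choices are placeholders for the example, and
// the caller-owned geometry pointer array must stay alive until the build has been recorded.
static vk::AccelerationStructureBuildGeometryInfoKHR
makeBuildInfo( const vk::AccelerationStructureGeometryKHR* const * ppGeometries,  // caller-owned array of one pointer
               vk::AccelerationStructureKHR                        dstAccelerationStructure,
               vk::DeviceAddress                                   scratchAddress )
{
  vk::AccelerationStructureBuildGeometryInfoKHR buildInfo;
  buildInfo.setType( vk::AccelerationStructureTypeKHR::eBottomLevel )
           .setFlags( vk::BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace )
           .setUpdate( VK_FALSE )                  // full build rather than an incremental update
           .setDstAccelerationStructure( dstAccelerationStructure )
           .setGeometryArrayOfPointers( VK_TRUE )  // ppGeometries is an array of geometryCount pointers
           .setGeometryCount( 1 )
           .setPpGeometries( ppGeometries )
           .setScratchData( vk::DeviceOrHostAddressKHR( scratchAddress ) );
  return buildInfo;
}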
- class AccelerationStructureNV
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ struct AccelerationStructureBuildOffsetInfoKHR
{
- public:
- using CType = VkAccelerationStructureNV;
- static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eAccelerationStructureNV;
- public:
- VULKAN_HPP_CONSTEXPR AccelerationStructureNV() VULKAN_HPP_NOEXCEPT
- : m_accelerationStructureNV(VK_NULL_HANDLE)
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR AccelerationStructureBuildOffsetInfoKHR(uint32_t primitiveCount_ = {}, uint32_t primitiveOffset_ = {}, uint32_t firstVertex_ = {}, uint32_t transformOffset_ = {}) VULKAN_HPP_NOEXCEPT
+ : primitiveCount( primitiveCount_ ), primitiveOffset( primitiveOffset_ ), firstVertex( firstVertex_ ), transformOffset( transformOffset_ )
{}
- VULKAN_HPP_CONSTEXPR AccelerationStructureNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- : m_accelerationStructureNV(VK_NULL_HANDLE)
- {}
-
- VULKAN_HPP_TYPESAFE_EXPLICIT AccelerationStructureNV( VkAccelerationStructureNV accelerationStructureNV ) VULKAN_HPP_NOEXCEPT
- : m_accelerationStructureNV( accelerationStructureNV )
- {}
+ VULKAN_HPP_CONSTEXPR AccelerationStructureBuildOffsetInfoKHR( AccelerationStructureBuildOffsetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- AccelerationStructureNV & operator=(VkAccelerationStructureNV accelerationStructureNV) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureBuildOffsetInfoKHR( VkAccelerationStructureBuildOffsetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- m_accelerationStructureNV = accelerationStructureNV;
- return *this;
+ *this = rhs;
}
-#endif
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- AccelerationStructureNV & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureBuildOffsetInfoKHR & operator=( VkAccelerationStructureBuildOffsetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- m_accelerationStructureNV = VK_NULL_HANDLE;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildOffsetInfoKHR const *>( &rhs );
return *this;
}
- bool operator==( AccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_accelerationStructureNV == rhs.m_accelerationStructureNV;
- }
-
- bool operator!=(AccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureBuildOffsetInfoKHR & operator=( AccelerationStructureBuildOffsetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- return m_accelerationStructureNV != rhs.m_accelerationStructureNV;
- }
-
- bool operator<(AccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_accelerationStructureNV < rhs.m_accelerationStructureNV;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( AccelerationStructureBuildOffsetInfoKHR ) );
+ return *this;
}
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkAccelerationStructureNV() const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureBuildOffsetInfoKHR & setPrimitiveCount( uint32_t primitiveCount_ ) VULKAN_HPP_NOEXCEPT
{
- return m_accelerationStructureNV;
+ primitiveCount = primitiveCount_;
+ return *this;
}
- explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureBuildOffsetInfoKHR & setPrimitiveOffset( uint32_t primitiveOffset_ ) VULKAN_HPP_NOEXCEPT
{
- return m_accelerationStructureNV != VK_NULL_HANDLE;
+ primitiveOffset = primitiveOffset_;
+ return *this;
}
- bool operator!() const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureBuildOffsetInfoKHR & setFirstVertex( uint32_t firstVertex_ ) VULKAN_HPP_NOEXCEPT
{
- return m_accelerationStructureNV == VK_NULL_HANDLE;
- }
-
- private:
- VkAccelerationStructureNV m_accelerationStructureNV;
- };
- static_assert( sizeof( AccelerationStructureNV ) == sizeof( VkAccelerationStructureNV ), "handle and wrapper have different size!" );
-
- template <>
- struct cpp_type<ObjectType::eAccelerationStructureNV>
- {
- using type = AccelerationStructureNV;
- };
-
- class DescriptorUpdateTemplate
- {
- public:
- using CType = VkDescriptorUpdateTemplate;
-
- static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDescriptorUpdateTemplate;
-
- public:
- VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate() VULKAN_HPP_NOEXCEPT
- : m_descriptorUpdateTemplate(VK_NULL_HANDLE)
- {}
-
- VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- : m_descriptorUpdateTemplate(VK_NULL_HANDLE)
- {}
-
- VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorUpdateTemplate( VkDescriptorUpdateTemplate descriptorUpdateTemplate ) VULKAN_HPP_NOEXCEPT
- : m_descriptorUpdateTemplate( descriptorUpdateTemplate )
- {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- DescriptorUpdateTemplate & operator=(VkDescriptorUpdateTemplate descriptorUpdateTemplate) VULKAN_HPP_NOEXCEPT
- {
- m_descriptorUpdateTemplate = descriptorUpdateTemplate;
+ firstVertex = firstVertex_;
return *this;
}
-#endif
- DescriptorUpdateTemplate & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureBuildOffsetInfoKHR & setTransformOffset( uint32_t transformOffset_ ) VULKAN_HPP_NOEXCEPT
{
- m_descriptorUpdateTemplate = VK_NULL_HANDLE;
+ transformOffset = transformOffset_;
return *this;
}
- bool operator==( DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_descriptorUpdateTemplate == rhs.m_descriptorUpdateTemplate;
- }
- bool operator!=(DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureBuildOffsetInfoKHR const&() const VULKAN_HPP_NOEXCEPT
{
- return m_descriptorUpdateTemplate != rhs.m_descriptorUpdateTemplate;
+ return *reinterpret_cast<const VkAccelerationStructureBuildOffsetInfoKHR*>( this );
}
- bool operator<(DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureBuildOffsetInfoKHR &() VULKAN_HPP_NOEXCEPT
{
- return m_descriptorUpdateTemplate < rhs.m_descriptorUpdateTemplate;
+ return *reinterpret_cast<VkAccelerationStructureBuildOffsetInfoKHR*>( this );
}
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorUpdateTemplate() const VULKAN_HPP_NOEXCEPT
- {
- return m_descriptorUpdateTemplate;
- }
- explicit operator bool() const VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( AccelerationStructureBuildOffsetInfoKHR const& ) const = default;
+#else
+ bool operator==( AccelerationStructureBuildOffsetInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- return m_descriptorUpdateTemplate != VK_NULL_HANDLE;
+ return ( primitiveCount == rhs.primitiveCount )
+ && ( primitiveOffset == rhs.primitiveOffset )
+ && ( firstVertex == rhs.firstVertex )
+ && ( transformOffset == rhs.transformOffset );
}
- bool operator!() const VULKAN_HPP_NOEXCEPT
+ bool operator!=( AccelerationStructureBuildOffsetInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- return m_descriptorUpdateTemplate == VK_NULL_HANDLE;
+ return !operator==( rhs );
}
+#endif
- private:
- VkDescriptorUpdateTemplate m_descriptorUpdateTemplate;
- };
- static_assert( sizeof( DescriptorUpdateTemplate ) == sizeof( VkDescriptorUpdateTemplate ), "handle and wrapper have different size!" );
- template <>
- struct cpp_type<ObjectType::eDescriptorUpdateTemplate>
- {
- using type = DescriptorUpdateTemplate;
- };
- using DescriptorUpdateTemplateKHR = DescriptorUpdateTemplate;
- class Event
- {
public:
- using CType = VkEvent;
+ uint32_t primitiveCount = {};
+ uint32_t primitiveOffset = {};
+ uint32_t firstVertex = {};
+ uint32_t transformOffset = {};
- static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eEvent;
+ };
+ static_assert( sizeof( AccelerationStructureBuildOffsetInfoKHR ) == sizeof( VkAccelerationStructureBuildOffsetInfoKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<AccelerationStructureBuildOffsetInfoKHR>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
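// Illustrative usage sketch, not part of the upstream header: one build-offset entry is
// supplied per geometry in the matching build-geometry info. triangleCount is a placeholder
// for the number of primitives read from the index data.
static vk::AccelerationStructureBuildOffsetInfoKHR
makeBuildOffset( uint32_t triangleCount )
{
  vk::AccelerationStructureBuildOffsetInfoKHR offset;
  offset.setPrimitiveCount( triangleCount )  // triangles to consume
        .setPrimitiveOffset( 0 )             // byte offset into the index / AABB data
        .setFirstVertex( 0 )
        .setTransformOffset( 0 );
  return offset;
}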
- public:
- VULKAN_HPP_CONSTEXPR Event() VULKAN_HPP_NOEXCEPT
- : m_event(VK_NULL_HANDLE)
- {}
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ struct AccelerationStructureCreateGeometryTypeInfoKHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureCreateGeometryTypeInfoKHR;
- VULKAN_HPP_CONSTEXPR Event( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- : m_event(VK_NULL_HANDLE)
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR AccelerationStructureCreateGeometryTypeInfoKHR(VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles, uint32_t maxPrimitiveCount_ = {}, VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16, uint32_t maxVertexCount_ = {}, VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::Bool32 allowsTransforms_ = {}) VULKAN_HPP_NOEXCEPT
+ : geometryType( geometryType_ ), maxPrimitiveCount( maxPrimitiveCount_ ), indexType( indexType_ ), maxVertexCount( maxVertexCount_ ), vertexFormat( vertexFormat_ ), allowsTransforms( allowsTransforms_ )
{}
- VULKAN_HPP_TYPESAFE_EXPLICIT Event( VkEvent event ) VULKAN_HPP_NOEXCEPT
- : m_event( event )
- {}
+ VULKAN_HPP_CONSTEXPR AccelerationStructureCreateGeometryTypeInfoKHR( AccelerationStructureCreateGeometryTypeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- Event & operator=(VkEvent event) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureCreateGeometryTypeInfoKHR( VkAccelerationStructureCreateGeometryTypeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- m_event = event;
- return *this;
+ *this = rhs;
}
-#endif
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- Event & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureCreateGeometryTypeInfoKHR & operator=( VkAccelerationStructureCreateGeometryTypeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- m_event = VK_NULL_HANDLE;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateGeometryTypeInfoKHR const *>( &rhs );
return *this;
}
- bool operator==( Event const & rhs ) const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureCreateGeometryTypeInfoKHR & operator=( AccelerationStructureCreateGeometryTypeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- return m_event == rhs.m_event;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( AccelerationStructureCreateGeometryTypeInfoKHR ) );
+ return *this;
}
- bool operator!=(Event const & rhs ) const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureCreateGeometryTypeInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- return m_event != rhs.m_event;
+ pNext = pNext_;
+ return *this;
}
- bool operator<(Event const & rhs ) const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureCreateGeometryTypeInfoKHR & setGeometryType( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ ) VULKAN_HPP_NOEXCEPT
{
- return m_event < rhs.m_event;
+ geometryType = geometryType_;
+ return *this;
}
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkEvent() const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureCreateGeometryTypeInfoKHR & setMaxPrimitiveCount( uint32_t maxPrimitiveCount_ ) VULKAN_HPP_NOEXCEPT
{
- return m_event;
+ maxPrimitiveCount = maxPrimitiveCount_;
+ return *this;
}
- explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureCreateGeometryTypeInfoKHR & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
{
- return m_event != VK_NULL_HANDLE;
+ indexType = indexType_;
+ return *this;
}
- bool operator!() const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureCreateGeometryTypeInfoKHR & setMaxVertexCount( uint32_t maxVertexCount_ ) VULKAN_HPP_NOEXCEPT
{
- return m_event == VK_NULL_HANDLE;
+ maxVertexCount = maxVertexCount_;
+ return *this;
}
- private:
- VkEvent m_event;
- };
- static_assert( sizeof( Event ) == sizeof( VkEvent ), "handle and wrapper have different size!" );
-
- template <>
- struct cpp_type<ObjectType::eEvent>
- {
- using type = Event;
- };
-
- class CommandBuffer
- {
- public:
- using CType = VkCommandBuffer;
-
- static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eCommandBuffer;
-
- public:
- VULKAN_HPP_CONSTEXPR CommandBuffer() VULKAN_HPP_NOEXCEPT
- : m_commandBuffer(VK_NULL_HANDLE)
- {}
-
- VULKAN_HPP_CONSTEXPR CommandBuffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- : m_commandBuffer(VK_NULL_HANDLE)
- {}
-
- VULKAN_HPP_TYPESAFE_EXPLICIT CommandBuffer( VkCommandBuffer commandBuffer ) VULKAN_HPP_NOEXCEPT
- : m_commandBuffer( commandBuffer )
- {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- CommandBuffer & operator=(VkCommandBuffer commandBuffer) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureCreateGeometryTypeInfoKHR & setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT
{
- m_commandBuffer = commandBuffer;
+ vertexFormat = vertexFormat_;
return *this;
}
-#endif
- CommandBuffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureCreateGeometryTypeInfoKHR & setAllowsTransforms( VULKAN_HPP_NAMESPACE::Bool32 allowsTransforms_ ) VULKAN_HPP_NOEXCEPT
{
- m_commandBuffer = VK_NULL_HANDLE;
+ allowsTransforms = allowsTransforms_;
return *this;
}
- bool operator==( CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_commandBuffer == rhs.m_commandBuffer;
- }
- bool operator!=(CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureCreateGeometryTypeInfoKHR const&() const VULKAN_HPP_NOEXCEPT
{
- return m_commandBuffer != rhs.m_commandBuffer;
+ return *reinterpret_cast<const VkAccelerationStructureCreateGeometryTypeInfoKHR*>( this );
}
- bool operator<(CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureCreateGeometryTypeInfoKHR &() VULKAN_HPP_NOEXCEPT
{
- return m_commandBuffer < rhs.m_commandBuffer;
+ return *reinterpret_cast<VkAccelerationStructureCreateGeometryTypeInfoKHR*>( this );
}
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo* pBeginInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type begin( const CommandBufferBeginInfo & beginInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginConditionalRenderingEXT( const ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, uint32_t index, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginRenderPass( const RenderPassBeginInfo & renderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginRenderPass2( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfo & subpassBeginInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginRenderPass2KHR( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfo & subpassBeginInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> counterBuffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> counterBufferOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets, ArrayProxy<const uint32_t> dynamicOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::IndexType indexType, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize* pSizes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> buffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> offsets, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> sizes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void bindVertexBuffers( uint32_t firstBinding, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> buffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> offsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageBlit* pRegions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> regions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV* pInfo, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void buildAccelerationStructureNV( const AccelerationStructureInfoNV & info, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void clearAttachments( uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::ClearAttachment* pAttachments, uint32_t rectCount, const VULKAN_HPP_NAMESPACE::ClearRect* pRects, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void clearAttachments( ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> attachments, ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> rects, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearColorValue* pColor, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> ranges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> ranges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeNV mode, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferCopy* pRegions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> regions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> regions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageCopy* pRegions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> regions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> regions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void debugMarkerEndEXT(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, VULKAN_HPP_NAMESPACE::Buffer counterBuffer, VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endConditionalRenderingEXT(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endDebugUtilsLabelEXT(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endRenderPass(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endRenderPass2( const SubpassEndInfo & subpassEndInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endRenderPass2KHR( const SubpassEndInfo & subpassEndInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> counterBuffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> counterBufferOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void executeCommands( uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void executeCommands( ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void nextSubpass2( const SubpassBeginInfo & subpassBeginInfo, const SubpassEndInfo & subpassEndInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void nextSubpass2KHR( const SubpassBeginInfo & subpassBeginInfo, const SubpassEndInfo & subpassEndInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> memoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> imageMemoryBarriers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void processCommandsNVX( const VULKAN_HPP_NAMESPACE::CmdProcessCommandsInfoNVX* pProcessCommandsInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void processCommandsNVX( const CmdProcessCommandsInfoNVX & processCommandsInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy<const T> values, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet* pDescriptorWrites, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> descriptorWrites, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, const void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void reserveSpaceForCommandsNVX( const VULKAN_HPP_NAMESPACE::CmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void reserveSpaceForCommandsNVX( const CmdReserveSpaceForCommandsInfoNVX & reserveSpaceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void resetEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageResolve* pRegions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> regions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setBlendConstants( const float blendConstants[4], Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setCheckpointNV( const void* pCheckpointMarker, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, ArrayProxy<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> customSampleOrders, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setDeviceMask( uint32_t deviceMask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setDeviceMaskKHR( uint32_t deviceMask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> discardRectangles, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setExclusiveScissorNV( uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setExclusiveScissorNV( uint32_t firstExclusiveScissor, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> exclusiveScissors, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setLineWidth( float lineWidth, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL* pMarkerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type setPerformanceMarkerINTEL( const PerformanceMarkerInfoINTEL & markerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL* pOverrideInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type setPerformanceOverrideINTEL( const PerformanceOverrideInfoINTEL & overrideInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL* pMarkerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type setPerformanceStreamMarkerINTEL( const PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT* pSampleLocationsInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setSampleLocationsEXT( const SampleLocationsInfoEXT & sampleLocationsInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setScissor( uint32_t firstScissor, uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pScissors, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setScissor( uint32_t firstScissor, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> scissors, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setViewport( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport* pViewports, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setViewport( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> viewports, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setViewportShadingRatePaletteNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setViewportShadingRatePaletteNV( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> shadingRatePalettes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setViewportWScalingNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setViewportWScalingNV( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> viewportWScalings, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize dataSize, const void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, ArrayProxy<const T> data, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void waitEvents( uint32_t eventCount, const VULKAN_HPP_NAMESPACE::Event* pEvents, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void waitEvents( ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> events, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> memoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> imageMemoryBarriers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void writeAccelerationStructuresPropertiesNV( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result end(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#else
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type end(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( AccelerationStructureCreateGeometryTypeInfoKHR const& ) const = default;
#else
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandBuffer() const VULKAN_HPP_NOEXCEPT
+ bool operator==( AccelerationStructureCreateGeometryTypeInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- return m_commandBuffer;
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( geometryType == rhs.geometryType )
+ && ( maxPrimitiveCount == rhs.maxPrimitiveCount )
+ && ( indexType == rhs.indexType )
+ && ( maxVertexCount == rhs.maxVertexCount )
+ && ( vertexFormat == rhs.vertexFormat )
+ && ( allowsTransforms == rhs.allowsTransforms );
}
- explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ bool operator!=( AccelerationStructureCreateGeometryTypeInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- return m_commandBuffer != VK_NULL_HANDLE;
+ return !operator==( rhs );
}
+#endif
- bool operator!() const VULKAN_HPP_NOEXCEPT
- {
- return m_commandBuffer == VK_NULL_HANDLE;
- }
- private:
- VkCommandBuffer m_commandBuffer;
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureCreateGeometryTypeInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles;
+ uint32_t maxPrimitiveCount = {};
+ VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16;
+ uint32_t maxVertexCount = {};
+ VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ VULKAN_HPP_NAMESPACE::Bool32 allowsTransforms = {};
+
};
- static_assert( sizeof( CommandBuffer ) == sizeof( VkCommandBuffer ), "handle and wrapper have different size!" );
+ static_assert( sizeof( AccelerationStructureCreateGeometryTypeInfoKHR ) == sizeof( VkAccelerationStructureCreateGeometryTypeInfoKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<AccelerationStructureCreateGeometryTypeInfoKHR>::value, "struct wrapper is not a standard layout!" );
template <>
- struct cpp_type<ObjectType::eCommandBuffer>
+ struct CppType<StructureType, StructureType::eAccelerationStructureCreateGeometryTypeInfoKHR>
{
- using type = CommandBuffer;
+ using Type = AccelerationStructureCreateGeometryTypeInfoKHR;
};
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
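A minimal usage sketch of the generated struct-wrapper pattern ending above (not part of the diff): it assumes a build that defines VK_ENABLE_BETA_EXTENSIONS before including vulkan.hpp, keeps the default struct constructors, and relies on the same conversion operators generated for every struct (shown explicitly for AccelerationStructureCreateInfoKHR further down); the function name describeGeometry and the concrete values are illustrative only.

    #define VK_ENABLE_BETA_EXTENSIONS
    #include <vulkan/vulkan.hpp>

    void describeGeometry()
    {
      // sType is pre-set by the wrapper; the remaining members are plain public fields.
      vk::AccelerationStructureCreateGeometryTypeInfoKHR info;
      info.geometryType      = vk::GeometryTypeKHR::eTriangles;
      info.maxPrimitiveCount = 1024;
      info.indexType         = vk::IndexType::eUint32;
      info.maxVertexCount    = 3 * 1024;
      info.vertexFormat      = vk::Format::eR32G32B32Sfloat;

      // The static_asserts above guarantee size and standard layout match the C struct,
      // so the generated conversion operators can hand the wrapper straight to the C API.
      const VkAccelerationStructureCreateGeometryTypeInfoKHR & cInfo = info;
      (void)cInfo;

      // Pre-C++20 (no spaceship operator) the generated operator== compares every member,
      // including sType and pNext.
      vk::AccelerationStructureCreateGeometryTypeInfoKHR copy( info );
      bool same = ( info == copy );
      (void)same;
    }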
- class DeviceMemory
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ struct AccelerationStructureCreateInfoKHR
{
- public:
- using CType = VkDeviceMemory;
-
- static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDeviceMemory;
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureCreateInfoKHR;
- public:
- VULKAN_HPP_CONSTEXPR DeviceMemory() VULKAN_HPP_NOEXCEPT
- : m_deviceMemory(VK_NULL_HANDLE)
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoKHR(VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ = {}, uint32_t maxGeometryCount_ = {}, const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateGeometryTypeInfoKHR* pGeometryInfos_ = {}, VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}) VULKAN_HPP_NOEXCEPT
+ : compactedSize( compactedSize_ ), type( type_ ), flags( flags_ ), maxGeometryCount( maxGeometryCount_ ), pGeometryInfos( pGeometryInfos_ ), deviceAddress( deviceAddress_ )
{}
- VULKAN_HPP_CONSTEXPR DeviceMemory( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- : m_deviceMemory(VK_NULL_HANDLE)
- {}
+ VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoKHR( AccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VULKAN_HPP_TYPESAFE_EXPLICIT DeviceMemory( VkDeviceMemory deviceMemory ) VULKAN_HPP_NOEXCEPT
- : m_deviceMemory( deviceMemory )
+ AccelerationStructureCreateInfoKHR( VkAccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ AccelerationStructureCreateInfoKHR( VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_, VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateGeometryTypeInfoKHR> const & geometryInfos_, VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} )
+ : compactedSize( compactedSize_ ), type( type_ ), flags( flags_ ), maxGeometryCount( static_cast<uint32_t>( geometryInfos_.size() ) ), pGeometryInfos( geometryInfos_.data() ), deviceAddress( deviceAddress_ )
{}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- DeviceMemory & operator=(VkDeviceMemory deviceMemory) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureCreateInfoKHR & operator=( VkAccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- m_deviceMemory = deviceMemory;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const *>( &rhs );
return *this;
}
-#endif
- DeviceMemory & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureCreateInfoKHR & operator=( AccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- m_deviceMemory = VK_NULL_HANDLE;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( AccelerationStructureCreateInfoKHR ) );
return *this;
}
- bool operator==( DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- return m_deviceMemory == rhs.m_deviceMemory;
+ pNext = pNext_;
+ return *this;
}
- bool operator!=(DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureCreateInfoKHR & setCompactedSize( VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ ) VULKAN_HPP_NOEXCEPT
{
- return m_deviceMemory != rhs.m_deviceMemory;
+ compactedSize = compactedSize_;
+ return *this;
}
- bool operator<(DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureCreateInfoKHR & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ ) VULKAN_HPP_NOEXCEPT
{
- return m_deviceMemory < rhs.m_deviceMemory;
+ type = type_;
+ return *this;
}
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDeviceMemory() const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
- return m_deviceMemory;
+ flags = flags_;
+ return *this;
}
- explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureCreateInfoKHR & setMaxGeometryCount( uint32_t maxGeometryCount_ ) VULKAN_HPP_NOEXCEPT
{
- return m_deviceMemory != VK_NULL_HANDLE;
+ maxGeometryCount = maxGeometryCount_;
+ return *this;
}
- bool operator!() const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureCreateInfoKHR & setPGeometryInfos( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateGeometryTypeInfoKHR* pGeometryInfos_ ) VULKAN_HPP_NOEXCEPT
{
- return m_deviceMemory == VK_NULL_HANDLE;
+ pGeometryInfos = pGeometryInfos_;
+ return *this;
}
- private:
- VkDeviceMemory m_deviceMemory;
- };
- static_assert( sizeof( DeviceMemory ) == sizeof( VkDeviceMemory ), "handle and wrapper have different size!" );
-
- template <>
- struct cpp_type<ObjectType::eDeviceMemory>
- {
- using type = DeviceMemory;
- };
-
- class BufferView
- {
- public:
- using CType = VkBufferView;
-
- static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eBufferView;
-
- public:
- VULKAN_HPP_CONSTEXPR BufferView() VULKAN_HPP_NOEXCEPT
- : m_bufferView(VK_NULL_HANDLE)
- {}
-
- VULKAN_HPP_CONSTEXPR BufferView( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- : m_bufferView(VK_NULL_HANDLE)
- {}
-
- VULKAN_HPP_TYPESAFE_EXPLICIT BufferView( VkBufferView bufferView ) VULKAN_HPP_NOEXCEPT
- : m_bufferView( bufferView )
- {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- BufferView & operator=(VkBufferView bufferView) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ AccelerationStructureCreateInfoKHR & setGeometryInfos( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateGeometryTypeInfoKHR> const & geometryInfos_ ) VULKAN_HPP_NOEXCEPT
{
- m_bufferView = bufferView;
+ maxGeometryCount = static_cast<uint32_t>( geometryInfos_.size() );
+ pGeometryInfos = geometryInfos_.data();
return *this;
}
-#endif
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
- BufferView & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureCreateInfoKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
{
- m_bufferView = VK_NULL_HANDLE;
+ deviceAddress = deviceAddress_;
return *this;
}
- bool operator==( BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_bufferView == rhs.m_bufferView;
- }
- bool operator!=(BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
{
- return m_bufferView != rhs.m_bufferView;
+ return *reinterpret_cast<const VkAccelerationStructureCreateInfoKHR*>( this );
}
- bool operator<(BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
- return m_bufferView < rhs.m_bufferView;
+ return *reinterpret_cast<VkAccelerationStructureCreateInfoKHR*>( this );
}
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBufferView() const VULKAN_HPP_NOEXCEPT
- {
- return m_bufferView;
- }
- explicit operator bool() const VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( AccelerationStructureCreateInfoKHR const& ) const = default;
+#else
+ bool operator==( AccelerationStructureCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- return m_bufferView != VK_NULL_HANDLE;
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( compactedSize == rhs.compactedSize )
+ && ( type == rhs.type )
+ && ( flags == rhs.flags )
+ && ( maxGeometryCount == rhs.maxGeometryCount )
+ && ( pGeometryInfos == rhs.pGeometryInfos )
+ && ( deviceAddress == rhs.deviceAddress );
}
- bool operator!() const VULKAN_HPP_NOEXCEPT
+ bool operator!=( AccelerationStructureCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- return m_bufferView == VK_NULL_HANDLE;
+ return !operator==( rhs );
}
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureCreateInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize compactedSize = {};
+ VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel;
+ VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags = {};
+ uint32_t maxGeometryCount = {};
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateGeometryTypeInfoKHR* pGeometryInfos = {};
+ VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {};
- private:
- VkBufferView m_bufferView;
};
- static_assert( sizeof( BufferView ) == sizeof( VkBufferView ), "handle and wrapper have different size!" );
+ static_assert( sizeof( AccelerationStructureCreateInfoKHR ) == sizeof( VkAccelerationStructureCreateInfoKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<AccelerationStructureCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
template <>
- struct cpp_type<ObjectType::eBufferView>
+ struct CppType<StructureType, StructureType::eAccelerationStructureCreateInfoKHR>
{
- using type = BufferView;
+ using Type = AccelerationStructureCreateInfoKHR;
};
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
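A short sketch of the two fill styles offered by the wrapper above: chained set*() calls, or the enhanced-mode ArrayProxyNoTemporaries overloads that derive maxGeometryCount and pGeometryInfos from a single container. It assumes the same VK_ENABLE_BETA_EXTENSIONS build as the sketch above, enhanced mode left enabled (the default), and an illustrative helper name buildCreateInfo.

    #include <vector>

    vk::AccelerationStructureCreateInfoKHR buildCreateInfo(
        std::vector<vk::AccelerationStructureCreateGeometryTypeInfoKHR> const & geometries )
    {
      // Chained setters mirror the C struct fields one for one; setGeometryInfos fills
      // maxGeometryCount and pGeometryInfos together so the two cannot drift apart.
      vk::AccelerationStructureCreateInfoKHR createInfo;
      createInfo.setType( vk::AccelerationStructureTypeKHR::eBottomLevel )
                .setFlags( vk::BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace )
                .setGeometryInfos( geometries );

      // The enhanced-mode constructor is equivalent: the ArrayProxyNoTemporaries argument
      // supplies both count and pointer, and deviceAddress keeps its default.
      vk::AccelerationStructureCreateInfoKHR viaCtor(
          0 /*compactedSize*/,
          vk::AccelerationStructureTypeKHR::eBottomLevel,
          vk::BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace,
          geometries );
      (void)viaCtor;

      return createInfo;
    }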
- class CommandPool
+ class Buffer
{
public:
- using CType = VkCommandPool;
+ using CType = VkBuffer;
- static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eCommandPool;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eBuffer;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBuffer;
public:
- VULKAN_HPP_CONSTEXPR CommandPool() VULKAN_HPP_NOEXCEPT
- : m_commandPool(VK_NULL_HANDLE)
+ VULKAN_HPP_CONSTEXPR Buffer() VULKAN_HPP_NOEXCEPT
+ : m_buffer(VK_NULL_HANDLE)
{}
- VULKAN_HPP_CONSTEXPR CommandPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- : m_commandPool(VK_NULL_HANDLE)
+ VULKAN_HPP_CONSTEXPR Buffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_buffer(VK_NULL_HANDLE)
{}
- VULKAN_HPP_TYPESAFE_EXPLICIT CommandPool( VkCommandPool commandPool ) VULKAN_HPP_NOEXCEPT
- : m_commandPool( commandPool )
+ VULKAN_HPP_TYPESAFE_EXPLICIT Buffer( VkBuffer buffer ) VULKAN_HPP_NOEXCEPT
+ : m_buffer( buffer )
{}
#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- CommandPool & operator=(VkCommandPool commandPool) VULKAN_HPP_NOEXCEPT
+ Buffer & operator=(VkBuffer buffer) VULKAN_HPP_NOEXCEPT
{
- m_commandPool = commandPool;
+ m_buffer = buffer;
return *this;
}
#endif
- CommandPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ Buffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
{
- m_commandPool = VK_NULL_HANDLE;
+ m_buffer = VK_NULL_HANDLE;
return *this;
}
- bool operator==( CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( Buffer const& ) const = default;
+#else
+ bool operator==( Buffer const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- return m_commandPool == rhs.m_commandPool;
+ return m_buffer == rhs.m_buffer;
}
- bool operator!=(CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=(Buffer const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- return m_commandPool != rhs.m_commandPool;
+ return m_buffer != rhs.m_buffer;
}
- bool operator<(CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator<(Buffer const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- return m_commandPool < rhs.m_commandPool;
+ return m_buffer < rhs.m_buffer;
}
+#endif
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandPool() const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBuffer() const VULKAN_HPP_NOEXCEPT
{
- return m_commandPool;
+ return m_buffer;
}
explicit operator bool() const VULKAN_HPP_NOEXCEPT
{
- return m_commandPool != VK_NULL_HANDLE;
+ return m_buffer != VK_NULL_HANDLE;
}
bool operator!() const VULKAN_HPP_NOEXCEPT
{
- return m_commandPool == VK_NULL_HANDLE;
+ return m_buffer == VK_NULL_HANDLE;
}
private:
- VkCommandPool m_commandPool;
+ VkBuffer m_buffer;
};
- static_assert( sizeof( CommandPool ) == sizeof( VkCommandPool ), "handle and wrapper have different size!" );
+ static_assert( sizeof( VULKAN_HPP_NAMESPACE::Buffer ) == sizeof( VkBuffer ), "handle and wrapper have different size!" );
template <>
- struct cpp_type<ObjectType::eCommandPool>
+ struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eBuffer>
{
- using type = CommandPool;
+ using type = VULKAN_HPP_NAMESPACE::Buffer;
};
- class PipelineCache
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eBuffer>
{
- public:
- using CType = VkPipelineCache;
-
- static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::ePipelineCache;
-
- public:
- VULKAN_HPP_CONSTEXPR PipelineCache() VULKAN_HPP_NOEXCEPT
- : m_pipelineCache(VK_NULL_HANDLE)
- {}
-
- VULKAN_HPP_CONSTEXPR PipelineCache( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- : m_pipelineCache(VK_NULL_HANDLE)
- {}
-
- VULKAN_HPP_TYPESAFE_EXPLICIT PipelineCache( VkPipelineCache pipelineCache ) VULKAN_HPP_NOEXCEPT
- : m_pipelineCache( pipelineCache )
- {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- PipelineCache & operator=(VkPipelineCache pipelineCache) VULKAN_HPP_NOEXCEPT
- {
- m_pipelineCache = pipelineCache;
- return *this;
- }
-#endif
-
- PipelineCache & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- {
- m_pipelineCache = VK_NULL_HANDLE;
- return *this;
- }
-
- bool operator==( PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_pipelineCache == rhs.m_pipelineCache;
- }
-
- bool operator!=(PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_pipelineCache != rhs.m_pipelineCache;
- }
-
- bool operator<(PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_pipelineCache < rhs.m_pipelineCache;
- }
-
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineCache() const VULKAN_HPP_NOEXCEPT
- {
- return m_pipelineCache;
- }
-
- explicit operator bool() const VULKAN_HPP_NOEXCEPT
- {
- return m_pipelineCache != VK_NULL_HANDLE;
- }
+ using Type = VULKAN_HPP_NAMESPACE::Buffer;
+ };
- bool operator!() const VULKAN_HPP_NOEXCEPT
- {
- return m_pipelineCache == VK_NULL_HANDLE;
- }
- private:
- VkPipelineCache m_pipelineCache;
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBuffer>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::Buffer;
};
- static_assert( sizeof( PipelineCache ) == sizeof( VkPipelineCache ), "handle and wrapper have different size!" );
+
template <>
- struct cpp_type<ObjectType::ePipelineCache>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Buffer>
{
- using type = PipelineCache;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
};
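A minimal sketch of the handle-wrapper behaviour shown above: vk::Buffer is a same-size, non-owning wrapper over VkBuffer that default-constructs to VK_NULL_HANDLE, compares like the raw handle, and converts back explicitly for calls into the C API. The function name inspectHandle is illustrative only.

    #include <vulkan/vulkan.hpp>

    void inspectHandle( vk::Buffer buffer )
    {
      if ( !buffer )                                   // operator! tests against VK_NULL_HANDLE
      {
        return;
      }

      vk::Buffer copy = buffer;                        // plain value copy; nothing is owned or freed
      bool sameHandle = ( copy == buffer );            // handle comparison (== / != / <, or <=> in C++20)
      (void)sameHandle;

      VkBuffer raw = static_cast<VkBuffer>( buffer );  // explicit conversion for C entry points
      (void)raw;

      buffer = nullptr;                                // reset the local wrapper to the null handle
    }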
- class DescriptorPool
+ struct GeometryTrianglesNV
{
- public:
- using CType = VkDescriptorPool;
-
- static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDescriptorPool;
-
- public:
- VULKAN_HPP_CONSTEXPR DescriptorPool() VULKAN_HPP_NOEXCEPT
- : m_descriptorPool(VK_NULL_HANDLE)
- {}
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeometryTrianglesNV;
- VULKAN_HPP_CONSTEXPR DescriptorPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- : m_descriptorPool(VK_NULL_HANDLE)
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR GeometryTrianglesNV(VULKAN_HPP_NAMESPACE::Buffer vertexData_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset_ = {}, uint32_t vertexCount_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ = {}, VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::Buffer indexData_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize indexOffset_ = {}, uint32_t indexCount_ = {}, VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16, VULKAN_HPP_NAMESPACE::Buffer transformData_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize transformOffset_ = {}) VULKAN_HPP_NOEXCEPT
+ : vertexData( vertexData_ ), vertexOffset( vertexOffset_ ), vertexCount( vertexCount_ ), vertexStride( vertexStride_ ), vertexFormat( vertexFormat_ ), indexData( indexData_ ), indexOffset( indexOffset_ ), indexCount( indexCount_ ), indexType( indexType_ ), transformData( transformData_ ), transformOffset( transformOffset_ )
{}
- VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorPool( VkDescriptorPool descriptorPool ) VULKAN_HPP_NOEXCEPT
- : m_descriptorPool( descriptorPool )
- {}
+ VULKAN_HPP_CONSTEXPR GeometryTrianglesNV( GeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- DescriptorPool & operator=(VkDescriptorPool descriptorPool) VULKAN_HPP_NOEXCEPT
+ GeometryTrianglesNV( VkGeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- m_descriptorPool = descriptorPool;
- return *this;
+ *this = rhs;
}
-#endif
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- DescriptorPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ GeometryTrianglesNV & operator=( VkGeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- m_descriptorPool = VK_NULL_HANDLE;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryTrianglesNV const *>( &rhs );
return *this;
}
- bool operator==( DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+ GeometryTrianglesNV & operator=( GeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- return m_descriptorPool == rhs.m_descriptorPool;
- }
-
- bool operator!=(DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_descriptorPool != rhs.m_descriptorPool;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( GeometryTrianglesNV ) );
+ return *this;
}
- bool operator<(DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+ GeometryTrianglesNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- return m_descriptorPool < rhs.m_descriptorPool;
+ pNext = pNext_;
+ return *this;
}
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorPool() const VULKAN_HPP_NOEXCEPT
+ GeometryTrianglesNV & setVertexData( VULKAN_HPP_NAMESPACE::Buffer vertexData_ ) VULKAN_HPP_NOEXCEPT
{
- return m_descriptorPool;
+ vertexData = vertexData_;
+ return *this;
}
- explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ GeometryTrianglesNV & setVertexOffset( VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset_ ) VULKAN_HPP_NOEXCEPT
{
- return m_descriptorPool != VK_NULL_HANDLE;
+ vertexOffset = vertexOffset_;
+ return *this;
}
- bool operator!() const VULKAN_HPP_NOEXCEPT
+ GeometryTrianglesNV & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT
{
- return m_descriptorPool == VK_NULL_HANDLE;
+ vertexCount = vertexCount_;
+ return *this;
}
- private:
- VkDescriptorPool m_descriptorPool;
- };
- static_assert( sizeof( DescriptorPool ) == sizeof( VkDescriptorPool ), "handle and wrapper have different size!" );
-
- template <>
- struct cpp_type<ObjectType::eDescriptorPool>
- {
- using type = DescriptorPool;
- };
-
- class DescriptorSetLayout
- {
- public:
- using CType = VkDescriptorSetLayout;
-
- static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDescriptorSetLayout;
-
- public:
- VULKAN_HPP_CONSTEXPR DescriptorSetLayout() VULKAN_HPP_NOEXCEPT
- : m_descriptorSetLayout(VK_NULL_HANDLE)
- {}
-
- VULKAN_HPP_CONSTEXPR DescriptorSetLayout( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- : m_descriptorSetLayout(VK_NULL_HANDLE)
- {}
-
- VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSetLayout( VkDescriptorSetLayout descriptorSetLayout ) VULKAN_HPP_NOEXCEPT
- : m_descriptorSetLayout( descriptorSetLayout )
- {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- DescriptorSetLayout & operator=(VkDescriptorSetLayout descriptorSetLayout) VULKAN_HPP_NOEXCEPT
+ GeometryTrianglesNV & setVertexStride( VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ ) VULKAN_HPP_NOEXCEPT
{
- m_descriptorSetLayout = descriptorSetLayout;
+ vertexStride = vertexStride_;
return *this;
}
-#endif
- DescriptorSetLayout & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ GeometryTrianglesNV & setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT
{
- m_descriptorSetLayout = VK_NULL_HANDLE;
+ vertexFormat = vertexFormat_;
return *this;
}
- bool operator==( DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
+ GeometryTrianglesNV & setIndexData( VULKAN_HPP_NAMESPACE::Buffer indexData_ ) VULKAN_HPP_NOEXCEPT
{
- return m_descriptorSetLayout == rhs.m_descriptorSetLayout;
+ indexData = indexData_;
+ return *this;
}
- bool operator!=(DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
+ GeometryTrianglesNV & setIndexOffset( VULKAN_HPP_NAMESPACE::DeviceSize indexOffset_ ) VULKAN_HPP_NOEXCEPT
{
- return m_descriptorSetLayout != rhs.m_descriptorSetLayout;
+ indexOffset = indexOffset_;
+ return *this;
}
- bool operator<(DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
+ GeometryTrianglesNV & setIndexCount( uint32_t indexCount_ ) VULKAN_HPP_NOEXCEPT
{
- return m_descriptorSetLayout < rhs.m_descriptorSetLayout;
+ indexCount = indexCount_;
+ return *this;
}
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSetLayout() const VULKAN_HPP_NOEXCEPT
+ GeometryTrianglesNV & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
{
- return m_descriptorSetLayout;
+ indexType = indexType_;
+ return *this;
}
- explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ GeometryTrianglesNV & setTransformData( VULKAN_HPP_NAMESPACE::Buffer transformData_ ) VULKAN_HPP_NOEXCEPT
{
- return m_descriptorSetLayout != VK_NULL_HANDLE;
+ transformData = transformData_;
+ return *this;
}
- bool operator!() const VULKAN_HPP_NOEXCEPT
+ GeometryTrianglesNV & setTransformOffset( VULKAN_HPP_NAMESPACE::DeviceSize transformOffset_ ) VULKAN_HPP_NOEXCEPT
{
- return m_descriptorSetLayout == VK_NULL_HANDLE;
+ transformOffset = transformOffset_;
+ return *this;
}
- private:
- VkDescriptorSetLayout m_descriptorSetLayout;
- };
- static_assert( sizeof( DescriptorSetLayout ) == sizeof( VkDescriptorSetLayout ), "handle and wrapper have different size!" );
-
- template <>
- struct cpp_type<ObjectType::eDescriptorSetLayout>
- {
- using type = DescriptorSetLayout;
- };
- class Framebuffer
- {
- public:
- using CType = VkFramebuffer;
-
- static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eFramebuffer;
-
- public:
- VULKAN_HPP_CONSTEXPR Framebuffer() VULKAN_HPP_NOEXCEPT
- : m_framebuffer(VK_NULL_HANDLE)
- {}
-
- VULKAN_HPP_CONSTEXPR Framebuffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- : m_framebuffer(VK_NULL_HANDLE)
- {}
-
- VULKAN_HPP_TYPESAFE_EXPLICIT Framebuffer( VkFramebuffer framebuffer ) VULKAN_HPP_NOEXCEPT
- : m_framebuffer( framebuffer )
- {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- Framebuffer & operator=(VkFramebuffer framebuffer) VULKAN_HPP_NOEXCEPT
+ operator VkGeometryTrianglesNV const&() const VULKAN_HPP_NOEXCEPT
{
- m_framebuffer = framebuffer;
- return *this;
+ return *reinterpret_cast<const VkGeometryTrianglesNV*>( this );
}
-#endif
- Framebuffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ operator VkGeometryTrianglesNV &() VULKAN_HPP_NOEXCEPT
{
- m_framebuffer = VK_NULL_HANDLE;
- return *this;
+ return *reinterpret_cast<VkGeometryTrianglesNV*>( this );
}
- bool operator==( Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_framebuffer == rhs.m_framebuffer;
- }
- bool operator!=(Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( GeometryTrianglesNV const& ) const = default;
+#else
+ bool operator==( GeometryTrianglesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- return m_framebuffer != rhs.m_framebuffer;
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( vertexData == rhs.vertexData )
+ && ( vertexOffset == rhs.vertexOffset )
+ && ( vertexCount == rhs.vertexCount )
+ && ( vertexStride == rhs.vertexStride )
+ && ( vertexFormat == rhs.vertexFormat )
+ && ( indexData == rhs.indexData )
+ && ( indexOffset == rhs.indexOffset )
+ && ( indexCount == rhs.indexCount )
+ && ( indexType == rhs.indexType )
+ && ( transformData == rhs.transformData )
+ && ( transformOffset == rhs.transformOffset );
}
- bool operator<(Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( GeometryTrianglesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- return m_framebuffer < rhs.m_framebuffer;
+ return !operator==( rhs );
}
+#endif
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkFramebuffer() const VULKAN_HPP_NOEXCEPT
- {
- return m_framebuffer;
- }
- explicit operator bool() const VULKAN_HPP_NOEXCEPT
- {
- return m_framebuffer != VK_NULL_HANDLE;
- }
- bool operator!() const VULKAN_HPP_NOEXCEPT
- {
- return m_framebuffer == VK_NULL_HANDLE;
- }
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryTrianglesNV;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Buffer vertexData = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset = {};
+ uint32_t vertexCount = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize vertexStride = {};
+ VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ VULKAN_HPP_NAMESPACE::Buffer indexData = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize indexOffset = {};
+ uint32_t indexCount = {};
+ VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16;
+ VULKAN_HPP_NAMESPACE::Buffer transformData = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize transformOffset = {};
- private:
- VkFramebuffer m_framebuffer;
};
- static_assert( sizeof( Framebuffer ) == sizeof( VkFramebuffer ), "handle and wrapper have different size!" );
+ static_assert( sizeof( GeometryTrianglesNV ) == sizeof( VkGeometryTrianglesNV ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<GeometryTrianglesNV>::value, "struct wrapper is not a standard layout!" );
template <>
- struct cpp_type<ObjectType::eFramebuffer>
+ struct CppType<StructureType, StructureType::eGeometryTrianglesNV>
{
- using type = Framebuffer;
+ using Type = GeometryTrianglesNV;
};
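
(Aside, not part of the patch: the handle-class boilerplate removed above is offset by struct wrappers such as GeometryTrianglesNV, which now carry fluent set*() members, so a triangle geometry can be described by chaining. A small sketch under the default vk namespace; the buffers, counts, stride and formats below are placeholder assumptions, not values taken from this change.)

// Illustrative only -- not part of the diff above.
#include <vulkan/vulkan.hpp>

vk::GeometryTrianglesNV makeTriangles( vk::Buffer vertexBuffer, uint32_t vertexCount,
                                       vk::Buffer indexBuffer, uint32_t indexCount )
{
  // Chained setters; vertexOffset, indexOffset and transformData keep their {} defaults.
  return vk::GeometryTrianglesNV{}
      .setVertexData( vertexBuffer )
      .setVertexCount( vertexCount )
      .setVertexStride( 3 * sizeof( float ) )          // assumes tightly packed vec3 positions
      .setVertexFormat( vk::Format::eR32G32B32Sfloat )
      .setIndexData( indexBuffer )
      .setIndexCount( indexCount )
      .setIndexType( vk::IndexType::eUint32 );
}
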
- class IndirectCommandsLayoutNVX
+ struct GeometryAABBNV
{
- public:
- using CType = VkIndirectCommandsLayoutNVX;
-
- static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eIndirectCommandsLayoutNVX;
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeometryAabbNV;
- public:
- VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNVX() VULKAN_HPP_NOEXCEPT
- : m_indirectCommandsLayoutNVX(VK_NULL_HANDLE)
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR GeometryAABBNV(VULKAN_HPP_NAMESPACE::Buffer aabbData_ = {}, uint32_t numAABBs_ = {}, uint32_t stride_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}) VULKAN_HPP_NOEXCEPT
+ : aabbData( aabbData_ ), numAABBs( numAABBs_ ), stride( stride_ ), offset( offset_ )
{}
- VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNVX( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- : m_indirectCommandsLayoutNVX(VK_NULL_HANDLE)
- {}
+ VULKAN_HPP_CONSTEXPR GeometryAABBNV( GeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VULKAN_HPP_TYPESAFE_EXPLICIT IndirectCommandsLayoutNVX( VkIndirectCommandsLayoutNVX indirectCommandsLayoutNVX ) VULKAN_HPP_NOEXCEPT
- : m_indirectCommandsLayoutNVX( indirectCommandsLayoutNVX )
- {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- IndirectCommandsLayoutNVX & operator=(VkIndirectCommandsLayoutNVX indirectCommandsLayoutNVX) VULKAN_HPP_NOEXCEPT
+ GeometryAABBNV( VkGeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- m_indirectCommandsLayoutNVX = indirectCommandsLayoutNVX;
- return *this;
+ *this = rhs;
}
-#endif
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- IndirectCommandsLayoutNVX & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ GeometryAABBNV & operator=( VkGeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- m_indirectCommandsLayoutNVX = VK_NULL_HANDLE;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryAABBNV const *>( &rhs );
return *this;
}
- bool operator==( IndirectCommandsLayoutNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
+ GeometryAABBNV & operator=( GeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- return m_indirectCommandsLayoutNVX == rhs.m_indirectCommandsLayoutNVX;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( GeometryAABBNV ) );
+ return *this;
}
- bool operator!=(IndirectCommandsLayoutNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
+ GeometryAABBNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- return m_indirectCommandsLayoutNVX != rhs.m_indirectCommandsLayoutNVX;
+ pNext = pNext_;
+ return *this;
}
- bool operator<(IndirectCommandsLayoutNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
+ GeometryAABBNV & setAabbData( VULKAN_HPP_NAMESPACE::Buffer aabbData_ ) VULKAN_HPP_NOEXCEPT
{
- return m_indirectCommandsLayoutNVX < rhs.m_indirectCommandsLayoutNVX;
+ aabbData = aabbData_;
+ return *this;
}
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkIndirectCommandsLayoutNVX() const VULKAN_HPP_NOEXCEPT
+ GeometryAABBNV & setNumAABBs( uint32_t numAABBs_ ) VULKAN_HPP_NOEXCEPT
{
- return m_indirectCommandsLayoutNVX;
+ numAABBs = numAABBs_;
+ return *this;
}
- explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ GeometryAABBNV & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT
{
- return m_indirectCommandsLayoutNVX != VK_NULL_HANDLE;
+ stride = stride_;
+ return *this;
}
- bool operator!() const VULKAN_HPP_NOEXCEPT
+ GeometryAABBNV & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
{
- return m_indirectCommandsLayoutNVX == VK_NULL_HANDLE;
+ offset = offset_;
+ return *this;
}
- private:
- VkIndirectCommandsLayoutNVX m_indirectCommandsLayoutNVX;
- };
- static_assert( sizeof( IndirectCommandsLayoutNVX ) == sizeof( VkIndirectCommandsLayoutNVX ), "handle and wrapper have different size!" );
-
- template <>
- struct cpp_type<ObjectType::eIndirectCommandsLayoutNVX>
- {
- using type = IndirectCommandsLayoutNVX;
- };
-
- class ObjectTableNVX
- {
- public:
- using CType = VkObjectTableNVX;
- static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eObjectTableNVX;
-
- public:
- VULKAN_HPP_CONSTEXPR ObjectTableNVX() VULKAN_HPP_NOEXCEPT
- : m_objectTableNVX(VK_NULL_HANDLE)
- {}
-
- VULKAN_HPP_CONSTEXPR ObjectTableNVX( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- : m_objectTableNVX(VK_NULL_HANDLE)
- {}
-
- VULKAN_HPP_TYPESAFE_EXPLICIT ObjectTableNVX( VkObjectTableNVX objectTableNVX ) VULKAN_HPP_NOEXCEPT
- : m_objectTableNVX( objectTableNVX )
- {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- ObjectTableNVX & operator=(VkObjectTableNVX objectTableNVX) VULKAN_HPP_NOEXCEPT
+ operator VkGeometryAABBNV const&() const VULKAN_HPP_NOEXCEPT
{
- m_objectTableNVX = objectTableNVX;
- return *this;
+ return *reinterpret_cast<const VkGeometryAABBNV*>( this );
}
-#endif
- ObjectTableNVX & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ operator VkGeometryAABBNV &() VULKAN_HPP_NOEXCEPT
{
- m_objectTableNVX = VK_NULL_HANDLE;
- return *this;
+ return *reinterpret_cast<VkGeometryAABBNV*>( this );
}
- bool operator==( ObjectTableNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_objectTableNVX == rhs.m_objectTableNVX;
- }
- bool operator!=(ObjectTableNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( GeometryAABBNV const& ) const = default;
+#else
+ bool operator==( GeometryAABBNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- return m_objectTableNVX != rhs.m_objectTableNVX;
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( aabbData == rhs.aabbData )
+ && ( numAABBs == rhs.numAABBs )
+ && ( stride == rhs.stride )
+ && ( offset == rhs.offset );
}
- bool operator<(ObjectTableNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( GeometryAABBNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- return m_objectTableNVX < rhs.m_objectTableNVX;
+ return !operator==( rhs );
}
+#endif
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkObjectTableNVX() const VULKAN_HPP_NOEXCEPT
- {
- return m_objectTableNVX;
- }
- explicit operator bool() const VULKAN_HPP_NOEXCEPT
- {
- return m_objectTableNVX != VK_NULL_HANDLE;
- }
- bool operator!() const VULKAN_HPP_NOEXCEPT
- {
- return m_objectTableNVX == VK_NULL_HANDLE;
- }
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryAabbNV;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Buffer aabbData = {};
+ uint32_t numAABBs = {};
+ uint32_t stride = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
- private:
- VkObjectTableNVX m_objectTableNVX;
};
- static_assert( sizeof( ObjectTableNVX ) == sizeof( VkObjectTableNVX ), "handle and wrapper have different size!" );
+ static_assert( sizeof( GeometryAABBNV ) == sizeof( VkGeometryAABBNV ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<GeometryAABBNV>::value, "struct wrapper is not a standard layout!" );
template <>
- struct cpp_type<ObjectType::eObjectTableNVX>
+ struct CppType<StructureType, StructureType::eGeometryAabbNV>
{
- using type = ObjectTableNVX;
+ using Type = GeometryAABBNV;
};
- class RenderPass
+ struct GeometryDataNV
{
- public:
- using CType = VkRenderPass;
- static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eRenderPass;
- public:
- VULKAN_HPP_CONSTEXPR RenderPass() VULKAN_HPP_NOEXCEPT
- : m_renderPass(VK_NULL_HANDLE)
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR GeometryDataNV(VULKAN_HPP_NAMESPACE::GeometryTrianglesNV triangles_ = {}, VULKAN_HPP_NAMESPACE::GeometryAABBNV aabbs_ = {}) VULKAN_HPP_NOEXCEPT
+ : triangles( triangles_ ), aabbs( aabbs_ )
{}
- VULKAN_HPP_CONSTEXPR RenderPass( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- : m_renderPass(VK_NULL_HANDLE)
- {}
-
- VULKAN_HPP_TYPESAFE_EXPLICIT RenderPass( VkRenderPass renderPass ) VULKAN_HPP_NOEXCEPT
- : m_renderPass( renderPass )
- {}
+ VULKAN_HPP_CONSTEXPR GeometryDataNV( GeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- RenderPass & operator=(VkRenderPass renderPass) VULKAN_HPP_NOEXCEPT
+ GeometryDataNV( VkGeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- m_renderPass = renderPass;
- return *this;
+ *this = rhs;
}
-#endif
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- RenderPass & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ GeometryDataNV & operator=( VkGeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- m_renderPass = VK_NULL_HANDLE;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryDataNV const *>( &rhs );
return *this;
}
- bool operator==( RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT
+ GeometryDataNV & operator=( GeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- return m_renderPass == rhs.m_renderPass;
- }
-
- bool operator!=(RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_renderPass != rhs.m_renderPass;
- }
-
- bool operator<(RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_renderPass < rhs.m_renderPass;
- }
-
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkRenderPass() const VULKAN_HPP_NOEXCEPT
- {
- return m_renderPass;
- }
-
- explicit operator bool() const VULKAN_HPP_NOEXCEPT
- {
- return m_renderPass != VK_NULL_HANDLE;
- }
-
- bool operator!() const VULKAN_HPP_NOEXCEPT
- {
- return m_renderPass == VK_NULL_HANDLE;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( GeometryDataNV ) );
+ return *this;
}
- private:
- VkRenderPass m_renderPass;
- };
- static_assert( sizeof( RenderPass ) == sizeof( VkRenderPass ), "handle and wrapper have different size!" );
-
- template <>
- struct cpp_type<ObjectType::eRenderPass>
- {
- using type = RenderPass;
- };
-
- class Sampler
- {
- public:
- using CType = VkSampler;
-
- static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eSampler;
-
- public:
- VULKAN_HPP_CONSTEXPR Sampler() VULKAN_HPP_NOEXCEPT
- : m_sampler(VK_NULL_HANDLE)
- {}
-
- VULKAN_HPP_CONSTEXPR Sampler( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- : m_sampler(VK_NULL_HANDLE)
- {}
-
- VULKAN_HPP_TYPESAFE_EXPLICIT Sampler( VkSampler sampler ) VULKAN_HPP_NOEXCEPT
- : m_sampler( sampler )
- {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- Sampler & operator=(VkSampler sampler) VULKAN_HPP_NOEXCEPT
+ GeometryDataNV & setTriangles( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV const & triangles_ ) VULKAN_HPP_NOEXCEPT
{
- m_sampler = sampler;
+ triangles = triangles_;
return *this;
}
-#endif
- Sampler & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ GeometryDataNV & setAabbs( VULKAN_HPP_NAMESPACE::GeometryAABBNV const & aabbs_ ) VULKAN_HPP_NOEXCEPT
{
- m_sampler = VK_NULL_HANDLE;
+ aabbs = aabbs_;
return *this;
}
- bool operator==( Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_sampler == rhs.m_sampler;
- }
- bool operator!=(Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT
+ operator VkGeometryDataNV const&() const VULKAN_HPP_NOEXCEPT
{
- return m_sampler != rhs.m_sampler;
+ return *reinterpret_cast<const VkGeometryDataNV*>( this );
}
- bool operator<(Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT
+ operator VkGeometryDataNV &() VULKAN_HPP_NOEXCEPT
{
- return m_sampler < rhs.m_sampler;
+ return *reinterpret_cast<VkGeometryDataNV*>( this );
}
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSampler() const VULKAN_HPP_NOEXCEPT
- {
- return m_sampler;
- }
- explicit operator bool() const VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( GeometryDataNV const& ) const = default;
+#else
+ bool operator==( GeometryDataNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- return m_sampler != VK_NULL_HANDLE;
+ return ( triangles == rhs.triangles )
+ && ( aabbs == rhs.aabbs );
}
- bool operator!() const VULKAN_HPP_NOEXCEPT
+ bool operator!=( GeometryDataNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- return m_sampler == VK_NULL_HANDLE;
+ return !operator==( rhs );
}
+#endif
- private:
- VkSampler m_sampler;
- };
- static_assert( sizeof( Sampler ) == sizeof( VkSampler ), "handle and wrapper have different size!" );
- template <>
- struct cpp_type<ObjectType::eSampler>
- {
- using type = Sampler;
- };
- class SamplerYcbcrConversion
- {
public:
- using CType = VkSamplerYcbcrConversion;
+ VULKAN_HPP_NAMESPACE::GeometryTrianglesNV triangles = {};
+ VULKAN_HPP_NAMESPACE::GeometryAABBNV aabbs = {};
- static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eSamplerYcbcrConversion;
+ };
+ static_assert( sizeof( GeometryDataNV ) == sizeof( VkGeometryDataNV ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<GeometryDataNV>::value, "struct wrapper is not a standard layout!" );
- public:
- VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion() VULKAN_HPP_NOEXCEPT
- : m_samplerYcbcrConversion(VK_NULL_HANDLE)
- {}
+ struct GeometryNV
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeometryNV;
- VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- : m_samplerYcbcrConversion(VK_NULL_HANDLE)
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR GeometryNV(VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles, VULKAN_HPP_NAMESPACE::GeometryDataNV geometry_ = {}, VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ = {}) VULKAN_HPP_NOEXCEPT
+ : geometryType( geometryType_ ), geometry( geometry_ ), flags( flags_ )
{}
- VULKAN_HPP_TYPESAFE_EXPLICIT SamplerYcbcrConversion( VkSamplerYcbcrConversion samplerYcbcrConversion ) VULKAN_HPP_NOEXCEPT
- : m_samplerYcbcrConversion( samplerYcbcrConversion )
- {}
+ VULKAN_HPP_CONSTEXPR GeometryNV( GeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- SamplerYcbcrConversion & operator=(VkSamplerYcbcrConversion samplerYcbcrConversion) VULKAN_HPP_NOEXCEPT
+ GeometryNV( VkGeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- m_samplerYcbcrConversion = samplerYcbcrConversion;
- return *this;
+ *this = rhs;
}
-#endif
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- SamplerYcbcrConversion & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ GeometryNV & operator=( VkGeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- m_samplerYcbcrConversion = VK_NULL_HANDLE;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryNV const *>( &rhs );
return *this;
}
- bool operator==( SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT
+ GeometryNV & operator=( GeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- return m_samplerYcbcrConversion == rhs.m_samplerYcbcrConversion;
- }
-
- bool operator!=(SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_samplerYcbcrConversion != rhs.m_samplerYcbcrConversion;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( GeometryNV ) );
+ return *this;
}
- bool operator<(SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT
+ GeometryNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- return m_samplerYcbcrConversion < rhs.m_samplerYcbcrConversion;
+ pNext = pNext_;
+ return *this;
}
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSamplerYcbcrConversion() const VULKAN_HPP_NOEXCEPT
+ GeometryNV & setGeometryType( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ ) VULKAN_HPP_NOEXCEPT
{
- return m_samplerYcbcrConversion;
+ geometryType = geometryType_;
+ return *this;
}
- explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ GeometryNV & setGeometry( VULKAN_HPP_NAMESPACE::GeometryDataNV const & geometry_ ) VULKAN_HPP_NOEXCEPT
{
- return m_samplerYcbcrConversion != VK_NULL_HANDLE;
+ geometry = geometry_;
+ return *this;
}
- bool operator!() const VULKAN_HPP_NOEXCEPT
+ GeometryNV & setFlags( VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
- return m_samplerYcbcrConversion == VK_NULL_HANDLE;
+ flags = flags_;
+ return *this;
}
- private:
- VkSamplerYcbcrConversion m_samplerYcbcrConversion;
- };
- static_assert( sizeof( SamplerYcbcrConversion ) == sizeof( VkSamplerYcbcrConversion ), "handle and wrapper have different size!" );
-
- template <>
- struct cpp_type<ObjectType::eSamplerYcbcrConversion>
- {
- using type = SamplerYcbcrConversion;
- };
- using SamplerYcbcrConversionKHR = SamplerYcbcrConversion;
-
- class ShaderModule
- {
- public:
- using CType = VkShaderModule;
-
- static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eShaderModule;
- public:
- VULKAN_HPP_CONSTEXPR ShaderModule() VULKAN_HPP_NOEXCEPT
- : m_shaderModule(VK_NULL_HANDLE)
- {}
-
- VULKAN_HPP_CONSTEXPR ShaderModule( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- : m_shaderModule(VK_NULL_HANDLE)
- {}
-
- VULKAN_HPP_TYPESAFE_EXPLICIT ShaderModule( VkShaderModule shaderModule ) VULKAN_HPP_NOEXCEPT
- : m_shaderModule( shaderModule )
- {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- ShaderModule & operator=(VkShaderModule shaderModule) VULKAN_HPP_NOEXCEPT
+ operator VkGeometryNV const&() const VULKAN_HPP_NOEXCEPT
{
- m_shaderModule = shaderModule;
- return *this;
+ return *reinterpret_cast<const VkGeometryNV*>( this );
}
-#endif
- ShaderModule & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ operator VkGeometryNV &() VULKAN_HPP_NOEXCEPT
{
- m_shaderModule = VK_NULL_HANDLE;
- return *this;
+ return *reinterpret_cast<VkGeometryNV*>( this );
}
- bool operator==( ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_shaderModule == rhs.m_shaderModule;
- }
- bool operator!=(ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( GeometryNV const& ) const = default;
+#else
+ bool operator==( GeometryNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- return m_shaderModule != rhs.m_shaderModule;
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( geometryType == rhs.geometryType )
+ && ( geometry == rhs.geometry )
+ && ( flags == rhs.flags );
}
- bool operator<(ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( GeometryNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- return m_shaderModule < rhs.m_shaderModule;
+ return !operator==( rhs );
}
+#endif
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkShaderModule() const VULKAN_HPP_NOEXCEPT
- {
- return m_shaderModule;
- }
- explicit operator bool() const VULKAN_HPP_NOEXCEPT
- {
- return m_shaderModule != VK_NULL_HANDLE;
- }
- bool operator!() const VULKAN_HPP_NOEXCEPT
- {
- return m_shaderModule == VK_NULL_HANDLE;
- }
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryNV;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles;
+ VULKAN_HPP_NAMESPACE::GeometryDataNV geometry = {};
+ VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags = {};
- private:
- VkShaderModule m_shaderModule;
};
- static_assert( sizeof( ShaderModule ) == sizeof( VkShaderModule ), "handle and wrapper have different size!" );
+ static_assert( sizeof( GeometryNV ) == sizeof( VkGeometryNV ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<GeometryNV>::value, "struct wrapper is not a standard layout!" );
template <>
- struct cpp_type<ObjectType::eShaderModule>
+ struct CppType<StructureType, StructureType::eGeometryNV>
{
- using type = ShaderModule;
+ using Type = GeometryNV;
};
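
(Aside, not part of the patch: GeometryNV composes a GeometryDataNV, which in turn holds either the GeometryTrianglesNV or the GeometryAABBNV description defined above. A short sketch that reuses a triangles helper like the one suggested earlier; that helper and the eOpaque flag are assumptions for illustration.)

// Illustrative only -- not part of the diff above.
#include <vulkan/vulkan.hpp>

vk::GeometryNV makeOpaqueTriangleGeometry( vk::GeometryTrianglesNV const & triangles )
{
  vk::GeometryDataNV data{};
  data.setTriangles( triangles );   // the aabbs member stays value-initialised

  return vk::GeometryNV{}
      .setGeometryType( vk::GeometryTypeKHR::eTriangles )
      .setGeometry( data )
      .setFlags( vk::GeometryFlagBitsKHR::eOpaque );
}
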
- class ValidationCacheEXT
+ struct AccelerationStructureInfoNV
{
- public:
- using CType = VkValidationCacheEXT;
-
- static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eValidationCacheEXT;
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureInfoNV;
- public:
- VULKAN_HPP_CONSTEXPR ValidationCacheEXT() VULKAN_HPP_NOEXCEPT
- : m_validationCacheEXT(VK_NULL_HANDLE)
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR AccelerationStructureInfoNV(VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_ = {}, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_ = {}, uint32_t instanceCount_ = {}, uint32_t geometryCount_ = {}, const VULKAN_HPP_NAMESPACE::GeometryNV* pGeometries_ = {}) VULKAN_HPP_NOEXCEPT
+ : type( type_ ), flags( flags_ ), instanceCount( instanceCount_ ), geometryCount( geometryCount_ ), pGeometries( pGeometries_ )
{}
- VULKAN_HPP_CONSTEXPR ValidationCacheEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- : m_validationCacheEXT(VK_NULL_HANDLE)
- {}
+ VULKAN_HPP_CONSTEXPR AccelerationStructureInfoNV( AccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VULKAN_HPP_TYPESAFE_EXPLICIT ValidationCacheEXT( VkValidationCacheEXT validationCacheEXT ) VULKAN_HPP_NOEXCEPT
- : m_validationCacheEXT( validationCacheEXT )
- {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- ValidationCacheEXT & operator=(VkValidationCacheEXT validationCacheEXT) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureInfoNV( VkAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- m_validationCacheEXT = validationCacheEXT;
- return *this;
+ *this = rhs;
}
-#endif
- ValidationCacheEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ AccelerationStructureInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_, uint32_t instanceCount_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::GeometryNV> const & geometries_ )
+ : type( type_ ), flags( flags_ ), instanceCount( instanceCount_ ), geometryCount( static_cast<uint32_t>( geometries_.size() ) ), pGeometries( geometries_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ AccelerationStructureInfoNV & operator=( VkAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- m_validationCacheEXT = VK_NULL_HANDLE;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV const *>( &rhs );
return *this;
}
- bool operator==( ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureInfoNV & operator=( AccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- return m_validationCacheEXT == rhs.m_validationCacheEXT;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( AccelerationStructureInfoNV ) );
+ return *this;
}
- bool operator!=(ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- return m_validationCacheEXT != rhs.m_validationCacheEXT;
+ pNext = pNext_;
+ return *this;
}
- bool operator<(ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureInfoNV & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_ ) VULKAN_HPP_NOEXCEPT
{
- return m_validationCacheEXT < rhs.m_validationCacheEXT;
+ type = type_;
+ return *this;
}
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkValidationCacheEXT() const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureInfoNV & setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
{
- return m_validationCacheEXT;
+ flags = flags_;
+ return *this;
}
- explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureInfoNV & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT
{
- return m_validationCacheEXT != VK_NULL_HANDLE;
+ instanceCount = instanceCount_;
+ return *this;
}
- bool operator!() const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureInfoNV & setGeometryCount( uint32_t geometryCount_ ) VULKAN_HPP_NOEXCEPT
{
- return m_validationCacheEXT == VK_NULL_HANDLE;
+ geometryCount = geometryCount_;
+ return *this;
}
- private:
- VkValidationCacheEXT m_validationCacheEXT;
- };
- static_assert( sizeof( ValidationCacheEXT ) == sizeof( VkValidationCacheEXT ), "handle and wrapper have different size!" );
-
- template <>
- struct cpp_type<ObjectType::eValidationCacheEXT>
- {
- using type = ValidationCacheEXT;
- };
-
- class Queue
- {
- public:
- using CType = VkQueue;
-
- static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eQueue;
-
- public:
- VULKAN_HPP_CONSTEXPR Queue() VULKAN_HPP_NOEXCEPT
- : m_queue(VK_NULL_HANDLE)
- {}
-
- VULKAN_HPP_CONSTEXPR Queue( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- : m_queue(VK_NULL_HANDLE)
- {}
-
- VULKAN_HPP_TYPESAFE_EXPLICIT Queue( VkQueue queue ) VULKAN_HPP_NOEXCEPT
- : m_queue( queue )
- {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- Queue & operator=(VkQueue queue) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureInfoNV & setPGeometries( const VULKAN_HPP_NAMESPACE::GeometryNV* pGeometries_ ) VULKAN_HPP_NOEXCEPT
{
- m_queue = queue;
+ pGeometries = pGeometries_;
return *this;
}
-#endif
- Queue & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ AccelerationStructureInfoNV & setGeometries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::GeometryNV> const & geometries_ ) VULKAN_HPP_NOEXCEPT
{
- m_queue = VK_NULL_HANDLE;
+ geometryCount = static_cast<uint32_t>( geometries_.size() );
+ pGeometries = geometries_.data();
return *this;
}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
- bool operator==( Queue const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_queue == rhs.m_queue;
- }
- bool operator!=(Queue const & rhs ) const VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureInfoNV const&() const VULKAN_HPP_NOEXCEPT
{
- return m_queue != rhs.m_queue;
+ return *reinterpret_cast<const VkAccelerationStructureInfoNV*>( this );
}
- bool operator<(Queue const & rhs ) const VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureInfoNV &() VULKAN_HPP_NOEXCEPT
{
- return m_queue < rhs.m_queue;
+ return *reinterpret_cast<VkAccelerationStructureInfoNV*>( this );
}
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getCheckpointDataNV( uint32_t* pCheckpointDataCount, VULKAN_HPP_NAMESPACE::CheckpointDataNV* pCheckpointData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator = std::allocator<CheckpointDataNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- std::vector<CheckpointDataNV,Allocator> getCheckpointDataNV(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<CheckpointDataNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- std::vector<CheckpointDataNV,Allocator> getCheckpointDataNV(Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result bindSparse( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindSparseInfo* pBindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type bindSparse( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> bindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endDebugUtilsLabelEXT(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR* pPresentInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result presentKHR( const PresentInfoKHR & presentInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#else
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result submit( uint32_t submitCount, const VULKAN_HPP_NAMESPACE::SubmitInfo* pSubmits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type submit( ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result waitIdle(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( AccelerationStructureInfoNV const& ) const = default;
#else
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type waitIdle(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueue() const VULKAN_HPP_NOEXCEPT
+ bool operator==( AccelerationStructureInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- return m_queue;
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( type == rhs.type )
+ && ( flags == rhs.flags )
+ && ( instanceCount == rhs.instanceCount )
+ && ( geometryCount == rhs.geometryCount )
+ && ( pGeometries == rhs.pGeometries );
}
- explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ bool operator!=( AccelerationStructureInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- return m_queue != VK_NULL_HANDLE;
+ return !operator==( rhs );
}
+#endif
- bool operator!() const VULKAN_HPP_NOEXCEPT
- {
- return m_queue == VK_NULL_HANDLE;
- }
- private:
- VkQueue m_queue;
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureInfoNV;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type = {};
+ VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags = {};
+ uint32_t instanceCount = {};
+ uint32_t geometryCount = {};
+ const VULKAN_HPP_NAMESPACE::GeometryNV* pGeometries = {};
+
};
- static_assert( sizeof( Queue ) == sizeof( VkQueue ), "handle and wrapper have different size!" );
+ static_assert( sizeof( AccelerationStructureInfoNV ) == sizeof( VkAccelerationStructureInfoNV ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<AccelerationStructureInfoNV>::value, "struct wrapper is not a standard layout!" );
template <>
- struct cpp_type<ObjectType::eQueue>
+ struct CppType<StructureType, StructureType::eAccelerationStructureInfoNV>
{
- using type = Queue;
+ using Type = AccelerationStructureInfoNV;
};
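
(Aside, not part of the patch: the new enhanced-mode constructor and setGeometries overload of AccelerationStructureInfoNV take an ArrayProxyNoTemporaries, so geometryCount and pGeometries can be filled from a container in one call. A sketch assuming enhanced mode, i.e. VULKAN_HPP_DISABLE_ENHANCED_MODE is not defined; the bottom-level type and fast-trace flag are placeholder choices.)

// Illustrative only -- not part of the diff above.
#include <vulkan/vulkan.hpp>
#include <vector>

vk::AccelerationStructureInfoNV makeBottomLevelInfo( std::vector<vk::GeometryNV> const & geometries )
{
  vk::AccelerationStructureInfoNV info{};
  info.setType( vk::AccelerationStructureTypeNV::eBottomLevel )
      .setFlags( vk::BuildAccelerationStructureFlagBitsNV::ePreferFastTrace )
      .setGeometries( geometries );   // fills geometryCount and pGeometries together
  return info;
}
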
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- class Device;
- template <typename Dispatch> class UniqueHandleTraits<AccelerationStructureNV, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
- using UniqueAccelerationStructureNV = UniqueHandle<AccelerationStructureNV, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
- template <typename Dispatch> class UniqueHandleTraits<Buffer, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
- using UniqueBuffer = UniqueHandle<Buffer, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
- template <typename Dispatch> class UniqueHandleTraits<BufferView, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
- using UniqueBufferView = UniqueHandle<BufferView, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
- template <typename Dispatch> class UniqueHandleTraits<CommandBuffer, Dispatch> { public: using deleter = PoolFree<Device, CommandPool, Dispatch>; };
- using UniqueCommandBuffer = UniqueHandle<CommandBuffer, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
- template <typename Dispatch> class UniqueHandleTraits<CommandPool, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
- using UniqueCommandPool = UniqueHandle<CommandPool, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
- template <typename Dispatch> class UniqueHandleTraits<DescriptorPool, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
- using UniqueDescriptorPool = UniqueHandle<DescriptorPool, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
- template <typename Dispatch> class UniqueHandleTraits<DescriptorSet, Dispatch> { public: using deleter = PoolFree<Device, DescriptorPool, Dispatch>; };
- using UniqueDescriptorSet = UniqueHandle<DescriptorSet, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
- template <typename Dispatch> class UniqueHandleTraits<DescriptorSetLayout, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
- using UniqueDescriptorSetLayout = UniqueHandle<DescriptorSetLayout, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
- template <typename Dispatch> class UniqueHandleTraits<DescriptorUpdateTemplate, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
- using UniqueDescriptorUpdateTemplate = UniqueHandle<DescriptorUpdateTemplate, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
- using UniqueDescriptorUpdateTemplateKHR = UniqueHandle<DescriptorUpdateTemplate, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
- template <typename Dispatch> class UniqueHandleTraits<DeviceMemory, Dispatch> { public: using deleter = ObjectFree<Device, Dispatch>; };
- using UniqueDeviceMemory = UniqueHandle<DeviceMemory, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
- template <typename Dispatch> class UniqueHandleTraits<Event, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
- using UniqueEvent = UniqueHandle<Event, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
- template <typename Dispatch> class UniqueHandleTraits<Fence, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
- using UniqueFence = UniqueHandle<Fence, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
- template <typename Dispatch> class UniqueHandleTraits<Framebuffer, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
- using UniqueFramebuffer = UniqueHandle<Framebuffer, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
- template <typename Dispatch> class UniqueHandleTraits<Image, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
- using UniqueImage = UniqueHandle<Image, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
- template <typename Dispatch> class UniqueHandleTraits<ImageView, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
- using UniqueImageView = UniqueHandle<ImageView, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
- template <typename Dispatch> class UniqueHandleTraits<IndirectCommandsLayoutNVX, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
- using UniqueIndirectCommandsLayoutNVX = UniqueHandle<IndirectCommandsLayoutNVX, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
- template <typename Dispatch> class UniqueHandleTraits<ObjectTableNVX, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
- using UniqueObjectTableNVX = UniqueHandle<ObjectTableNVX, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
- template <typename Dispatch> class UniqueHandleTraits<Pipeline, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
- using UniquePipeline = UniqueHandle<Pipeline, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
- template <typename Dispatch> class UniqueHandleTraits<PipelineCache, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
- using UniquePipelineCache = UniqueHandle<PipelineCache, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
- template <typename Dispatch> class UniqueHandleTraits<PipelineLayout, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
- using UniquePipelineLayout = UniqueHandle<PipelineLayout, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
- template <typename Dispatch> class UniqueHandleTraits<QueryPool, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
- using UniqueQueryPool = UniqueHandle<QueryPool, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
- template <typename Dispatch> class UniqueHandleTraits<RenderPass, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
- using UniqueRenderPass = UniqueHandle<RenderPass, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
- template <typename Dispatch> class UniqueHandleTraits<Sampler, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
- using UniqueSampler = UniqueHandle<Sampler, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
- template <typename Dispatch> class UniqueHandleTraits<SamplerYcbcrConversion, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
- using UniqueSamplerYcbcrConversion = UniqueHandle<SamplerYcbcrConversion, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
- using UniqueSamplerYcbcrConversionKHR = UniqueHandle<SamplerYcbcrConversion, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
- template <typename Dispatch> class UniqueHandleTraits<Semaphore, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
- using UniqueSemaphore = UniqueHandle<Semaphore, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
- template <typename Dispatch> class UniqueHandleTraits<ShaderModule, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
- using UniqueShaderModule = UniqueHandle<ShaderModule, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
- template <typename Dispatch> class UniqueHandleTraits<SwapchainKHR, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
- using UniqueSwapchainKHR = UniqueHandle<SwapchainKHR, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
- template <typename Dispatch> class UniqueHandleTraits<ValidationCacheEXT, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
- using UniqueValidationCacheEXT = UniqueHandle<ValidationCacheEXT, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-
- class Device
+ struct AccelerationStructureCreateInfoNV
{
- public:
- using CType = VkDevice;
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureCreateInfoNV;
- static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDevice;
-
- public:
- VULKAN_HPP_CONSTEXPR Device() VULKAN_HPP_NOEXCEPT
- : m_device(VK_NULL_HANDLE)
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoNV(VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info_ = {}) VULKAN_HPP_NOEXCEPT
+ : compactedSize( compactedSize_ ), info( info_ )
{}
- VULKAN_HPP_CONSTEXPR Device( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- : m_device(VK_NULL_HANDLE)
- {}
+ VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoNV( AccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VULKAN_HPP_TYPESAFE_EXPLICIT Device( VkDevice device ) VULKAN_HPP_NOEXCEPT
- : m_device( device )
- {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- Device & operator=(VkDevice device) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureCreateInfoNV( VkAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- m_device = device;
- return *this;
+ *this = rhs;
}
-#endif
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- Device & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureCreateInfoNV & operator=( VkAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- m_device = VK_NULL_HANDLE;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const *>( &rhs );
return *this;
}
- bool operator==( Device const & rhs ) const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureCreateInfoNV & operator=( AccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- return m_device == rhs.m_device;
- }
-
- bool operator!=(Device const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_device != rhs.m_device;
- }
-
- bool operator<(Device const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_device < rhs.m_device;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( AccelerationStructureCreateInfoNV ) );
+ return *this;
}
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#else
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValue<uint32_t> acquireNextImage2KHR( const AcquireNextImageInfoKHR & acquireInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence, uint32_t* pImageIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValue<uint32_t> acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL* pConfiguration, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>::type acquirePerformanceConfigurationINTEL( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type acquireProfilingLockKHR( const AcquireProfilingLockInfoKHR & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo* pAllocateInfo, VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator = std::allocator<CommandBuffer>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<CommandBuffer,Allocator>>::type allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<CommandBuffer>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<CommandBuffer,Allocator>>::type allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Allocator = std::allocator<UniqueCommandBuffer>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<UniqueHandle<CommandBuffer,Dispatch>,Allocator>>::type allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<UniqueCommandBuffer>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<UniqueHandle<CommandBuffer,Dispatch>,Allocator>>::type allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo* pAllocateInfo, VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator = std::allocator<DescriptorSet>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<DescriptorSet,Allocator>>::type allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<DescriptorSet>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<DescriptorSet,Allocator>>::type allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Allocator = std::allocator<UniqueDescriptorSet>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<UniqueHandle<DescriptorSet,Dispatch>,Allocator>>::type allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<UniqueDescriptorSet>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<UniqueHandle<DescriptorSet,Dispatch>,Allocator>>::type allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo* pAllocateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DeviceMemory* pMemory, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceMemory>::type allocateMemory( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<DeviceMemory,Dispatch>>::type allocateMemoryUnique( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
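// A minimal sketch of the enhanced-mode allocateMemory overloads declared above,
// assuming `device` is a valid vk::Device and that `size` and `memoryTypeIndex`
// were obtained from the usual memory-requirements / memory-properties queries.
#include <vulkan/vulkan.hpp>

vk::UniqueDeviceMemory allocateDeviceMemory( vk::Device device, vk::DeviceSize size, uint32_t memoryTypeIndex )
{
  vk::MemoryAllocateInfo allocInfo( size, memoryTypeIndex );
  // allocateMemoryUnique wraps the allocation so it is freed automatically when the handle is destroyed.
  return device.allocateMemoryUnique( allocInfo );
}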
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result bindAccelerationStructureMemoryNV( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV* pBindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type bindAccelerationStructureMemoryNV( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV> bindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#else
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
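// A small sketch of the bindBufferMemory declarations above, assuming valid `device`,
// `buffer` and `memory` handles. In enhanced mode the call returns void and throws on
// failure (a vk::SystemError-derived exception); with VULKAN_HPP_DISABLE_ENHANCED_MODE
// defined it returns the raw vk::Result instead, matching the first overload.
#include <vulkan/vulkan.hpp>

void bindBufferAtOffsetZero( vk::Device device, vk::Buffer buffer, vk::DeviceMemory memory )
{
  device.bindBufferMemory( buffer, memory, 0 );
}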
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result bindBufferMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo* pBindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type bindBufferMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> bindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result bindBufferMemory2KHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo* pBindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type bindBufferMemory2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> bindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#else
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result bindImageMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo* pBindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type bindImageMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> bindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result bindImageMemory2KHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo* pBindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type bindImageMemory2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> bindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#else
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructure, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureNV>::type createAccelerationStructureNV( const AccelerationStructureCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<AccelerationStructureNV,Dispatch>>::type createAccelerationStructureNVUnique( const AccelerationStructureCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Buffer* pBuffer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::Buffer>::type createBuffer( const BufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<Buffer,Dispatch>>::type createBufferUnique( const BufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
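// A minimal sketch of the createBuffer / createBufferUnique overloads declared above,
// assuming `device` is a valid vk::Device; the transfer-source usage flag is only an example.
#include <vulkan/vulkan.hpp>

vk::UniqueBuffer createStagingBuffer( vk::Device device, vk::DeviceSize size )
{
  vk::BufferCreateInfo createInfo( {}, size, vk::BufferUsageFlagBits::eTransferSrc, vk::SharingMode::eExclusive );
  return device.createBufferUnique( createInfo );
}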
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::BufferView* pView, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferView>::type createBufferView( const BufferViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<BufferView,Dispatch>>::type createBufferViewUnique( const BufferViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::CommandPool* pCommandPool, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::CommandPool>::type createCommandPool( const CommandPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<CommandPool,Dispatch>>::type createCommandPoolUnique( const CommandPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<Pipeline,Allocator>>::type createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<Pipeline,Allocator>>::type createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<Pipeline>::type createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Allocator = std::allocator<UniquePipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<UniquePipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<Pipeline,Dispatch>>::type createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorPool* pDescriptorPool, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorPool>::type createDescriptorPool( const DescriptorPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<DescriptorPool,Dispatch>>::type createDescriptorPoolUnique( const DescriptorPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>::type createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<DescriptorSetLayout,Dispatch>>::type createDescriptorSetLayoutUnique( const DescriptorSetLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<DescriptorUpdateTemplate,Dispatch>>::type createDescriptorUpdateTemplateUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<DescriptorUpdateTemplate,Dispatch>>::type createDescriptorUpdateTemplateKHRUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Event* pEvent, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::Event>::type createEvent( const EventCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<Event,Dispatch>>::type createEventUnique( const EventCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type createFence( const FenceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<Fence,Dispatch>>::type createFenceUnique( const FenceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Framebuffer* pFramebuffer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::Framebuffer>::type createFramebuffer( const FramebufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<Framebuffer,Dispatch>>::type createFramebufferUnique( const FramebufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<Pipeline,Allocator>>::type createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<Pipeline,Allocator>>::type createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<Pipeline>::type createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Allocator = std::allocator<UniquePipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<UniquePipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<Pipeline,Dispatch>>::type createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
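// A sketch of the singular/plural split in the createGraphicsPipelines declarations
// above, assuming the caller has already filled in the create-info structures: the
// plural overload takes an ArrayProxy and returns one vk::Pipeline per entry, while
// the singular convenience overload returns a single pipeline.
#include <vulkan/vulkan.hpp>
#include <vector>

vk::Pipeline buildPipeline( vk::Device device, vk::PipelineCache cache,
                            const vk::GraphicsPipelineCreateInfo & createInfo )
{
  return device.createGraphicsPipeline( cache, createInfo );
}

std::vector<vk::Pipeline> buildPipelines( vk::Device device, vk::PipelineCache cache,
                                          const std::vector<vk::GraphicsPipelineCreateInfo> & createInfos )
{
  return device.createGraphicsPipelines( cache, createInfos );
}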
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Image* pImage, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::Image>::type createImage( const ImageCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<Image,Dispatch>>::type createImageUnique( const ImageCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ImageView* pView, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageView>::type createImageView( const ImageViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<ImageView,Dispatch>>::type createImageViewUnique( const ImageViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createIndirectCommandsLayoutNVX( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX* pIndirectCommandsLayout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX>::type createIndirectCommandsLayoutNVX( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<IndirectCommandsLayoutNVX,Dispatch>>::type createIndirectCommandsLayoutNVXUnique( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createObjectTableNVX( const VULKAN_HPP_NAMESPACE::ObjectTableCreateInfoNVX* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ObjectTableNVX* pObjectTable, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::ObjectTableNVX>::type createObjectTableNVX( const ObjectTableCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<ObjectTableNVX,Dispatch>>::type createObjectTableNVXUnique( const ObjectTableCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PipelineCache* pPipelineCache, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineCache>::type createPipelineCache( const PipelineCacheCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<PipelineCache,Dispatch>>::type createPipelineCacheUnique( const PipelineCacheCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PipelineLayout* pPipelineLayout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineLayout>::type createPipelineLayout( const PipelineLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<PipelineLayout,Dispatch>>::type createPipelineLayoutUnique( const PipelineLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::QueryPool* pQueryPool, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::QueryPool>::type createQueryPool( const QueryPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<QueryPool,Dispatch>>::type createQueryPoolUnique( const QueryPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<Pipeline,Allocator>>::type createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<Pipeline,Allocator>>::type createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<Pipeline>::type createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Allocator = std::allocator<UniquePipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<UniquePipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<Pipeline,Dispatch>>::type createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type createRenderPass( const RenderPassCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<RenderPass,Dispatch>>::type createRenderPassUnique( const RenderPassCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type createRenderPass2( const RenderPassCreateInfo2 & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<RenderPass,Dispatch>>::type createRenderPass2Unique( const RenderPassCreateInfo2 & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type createRenderPass2KHR( const RenderPassCreateInfo2 & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<RenderPass,Dispatch>>::type createRenderPass2KHRUnique( const RenderPassCreateInfo2 & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Sampler* pSampler, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::Sampler>::type createSampler( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<Sampler,Dispatch>>::type createSamplerUnique( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<SamplerYcbcrConversion,Dispatch>>::type createSamplerYcbcrConversionUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<SamplerYcbcrConversion,Dispatch>>::type createSamplerYcbcrConversionKHRUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Semaphore* pSemaphore, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::Semaphore>::type createSemaphore( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<Semaphore,Dispatch>>::type createSemaphoreUnique( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ShaderModule* pShaderModule, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::ShaderModule>::type createShaderModule( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<ShaderModule,Dispatch>>::type createShaderModuleUnique( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createSharedSwapchainsKHR( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator = std::allocator<SwapchainKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<SwapchainKHR,Allocator>>::type createSharedSwapchainsKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<SwapchainKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<SwapchainKHR,Allocator>>::type createSharedSwapchainsKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<SwapchainKHR>::type createSharedSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Allocator = std::allocator<UniqueSwapchainKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR,Dispatch>,Allocator>>::type createSharedSwapchainsKHRUnique( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<UniqueSwapchainKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR,Dispatch>,Allocator>>::type createSharedSwapchainsKHRUnique( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<SwapchainKHR,Dispatch>>::type createSharedSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type createSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<SwapchainKHR,Dispatch>>::type createSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ValidationCacheEXT* pValidationCache, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::ValidationCacheEXT>::type createValidationCacheEXT( const ValidationCacheCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<ValidationCacheEXT,Dispatch>>::type createValidationCacheEXTUnique( const ValidationCacheCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT* pNameInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
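// A sketch of debugMarkerSetObjectNameEXT (VK_EXT_debug_marker) from the declarations
// above, assuming the extension is enabled on the device; the object type and the
// 64-bit handle value are supplied by the caller.
#include <vulkan/vulkan.hpp>

void nameObject( vk::Device device, vk::DebugReportObjectTypeEXT objectType, uint64_t objectHandle, const char * name )
{
  vk::DebugMarkerObjectNameInfoEXT nameInfo( objectType, objectHandle, name );
  device.debugMarkerSetObjectNameEXT( nameInfo );
}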
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT* pTagInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyEvent( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyEvent( VULKAN_HPP_NAMESPACE::Event event, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::Event event, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::Fence fence, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyImage( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyImage( VULKAN_HPP_NAMESPACE::Image image, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::Image image, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyIndirectCommandsLayoutNVX( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyIndirectCommandsLayoutNVX( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyObjectTableNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyObjectTableNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result waitIdle(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#else
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type waitIdle(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT* pDisplayPowerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayPowerInfoEXT & displayPowerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result flushMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange* pMemoryRanges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type flushMappedMemoryRanges( ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> memoryRanges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, size_t dataSize, void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, ArrayProxy<T> data, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
- template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- StructureChain<X, Y, Z...> getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer* buffer, VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>::type getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<StructureChain<X, Y, Z...>>::type getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- DeviceAddress getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- DeviceAddress getBufferAddress( const BufferDeviceAddressInfo & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- DeviceAddress getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- DeviceAddress getBufferAddressEXT( const BufferDeviceAddressInfo & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- DeviceAddress getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- DeviceAddress getBufferAddressKHR( const BufferDeviceAddressInfo & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::MemoryRequirements* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NAMESPACE::MemoryRequirements getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
- template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- StructureChain<X, Y, Z...> getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
- template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- StructureChain<X, Y, Z...> getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- uint64_t getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- uint64_t getBufferOpaqueCaptureAddress( const BufferDeviceAddressInfo & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- uint64_t getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- uint64_t getBufferOpaqueCaptureAddressKHR( const BufferDeviceAddressInfo & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getCalibratedTimestampsEXT( uint32_t timestampCount, const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<uint64_t>::type getCalibratedTimestampsEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> timestampInfos, ArrayProxy<uint64_t> timestamps, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport* pSupport, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
- template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- StructureChain<X, Y, Z...> getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport* pSupport, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
- template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- StructureChain<X, Y, Z...> getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getGroupPresentCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::type getGroupPresentCapabilitiesKHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type getGroupSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize* pCommittedMemoryInBytes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NAMESPACE::DeviceSize getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- uint64_t getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- uint64_t getMemoryOpaqueCaptureAddress( const DeviceMemoryOpaqueCaptureAddressInfo & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- uint64_t getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- uint64_t getMemoryOpaqueCaptureAddressKHR( const DeviceMemoryOpaqueCaptureAddressInfo & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- PFN_vkVoidFunction getProcAddr( const char* pName, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- PFN_vkVoidFunction getProcAddr( const std::string & name, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, VULKAN_HPP_NAMESPACE::Queue* pQueue, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NAMESPACE::Queue getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2* pQueueInfo, VULKAN_HPP_NAMESPACE::Queue* pQueue, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NAMESPACE::Queue getQueue2( const DeviceQueueInfo2 & queueInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#else
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<int>::type getFenceFdKHR( const FenceGetFdInfoKHR & getFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#else
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<HANDLE>::type getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT>::type getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::MemoryRequirements* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NAMESPACE::MemoryRequirements getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
- template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- StructureChain<X, Y, Z...> getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
- template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- StructureChain<X, Y, Z...> getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements* pSparseMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator = std::allocator<SparseImageMemoryRequirements>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- std::vector<SparseImageMemoryRequirements,Allocator> getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<SparseImageMemoryRequirements>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- std::vector<SparseImageMemoryRequirements,Allocator> getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator = std::allocator<SparseImageMemoryRequirements2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- std::vector<SparseImageMemoryRequirements2,Allocator> getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<SparseImageMemoryRequirements2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- std::vector<SparseImageMemoryRequirements2,Allocator> getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator = std::allocator<SparseImageMemoryRequirements2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- std::vector<SparseImageMemoryRequirements2,Allocator> getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<SparseImageMemoryRequirements2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- std::vector<SparseImageMemoryRequirements2,Allocator> getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource* pSubresource, VULKAN_HPP_NAMESPACE::SubresourceLayout* pLayout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NAMESPACE::SubresourceLayout getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, const ImageSubresource & subresource, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- uint32_t getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- uint32_t getImageViewHandleNVX( const ImageViewHandleInfoNVX & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<struct AHardwareBuffer*>::type getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<int>::type getMemoryFdKHR( const MemoryGetFdInfoKHR & getFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR* pMemoryFdProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>::type getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT>::type getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<HANDLE>::type getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<HANDLE>::type getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>::type getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE* pPresentationTimings, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator = std::allocator<PastPresentationTimingGOOGLE>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<PastPresentationTimingGOOGLE,Allocator>>::type getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<PastPresentationTimingGOOGLE>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<PastPresentationTimingGOOGLE,Allocator>>::type getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, VULKAN_HPP_NAMESPACE::PerformanceValueINTEL* pValue, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL>::type getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, size_t* pDataSize, void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<uint8_t,Allocator>>::type getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<uint8_t,Allocator>>::type getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR* pInternalRepresentations, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator = std::allocator<PipelineExecutableInternalRepresentationKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<PipelineExecutableInternalRepresentationKHR,Allocator>>::type getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<PipelineExecutableInternalRepresentationKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<PipelineExecutableInternalRepresentationKHR,Allocator>>::type getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator = std::allocator<PipelineExecutablePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<PipelineExecutablePropertiesKHR,Allocator>>::type getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<PipelineExecutablePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<PipelineExecutablePropertiesKHR,Allocator>>::type getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR* pStatistics, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator = std::allocator<PipelineExecutableStatisticKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<PipelineExecutableStatisticKHR,Allocator>>::type getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<PipelineExecutableStatisticKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<PipelineExecutableStatisticKHR,Allocator>>::type getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, ArrayProxy<T> data, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy<T> data, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE* pDisplayTimingProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>::type getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, VULKAN_HPP_NAMESPACE::Extent2D* pGranularity, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NAMESPACE::Extent2D getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t* pValue, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<uint64_t>::type getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t* pValue, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<uint64_t>::type getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<int>::type getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<HANDLE>::type getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<uint8_t,Allocator>>::type getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<uint8_t,Allocator>>::type getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<uint64_t>::type getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VULKAN_HPP_NAMESPACE::Image* pSwapchainImages, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator = std::allocator<Image>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<Image,Allocator>>::type getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<Image>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<Image,Allocator>>::type getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#else
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, size_t* pDataSize, void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<uint8_t,Allocator>>::type getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<uint8_t,Allocator>>::type getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR* pImportFenceFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL* pInitializeInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type initializePerformanceApiINTEL( const InitializePerformanceApiInfoINTEL & initializeInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result invalidateMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange* pMemoryRanges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type invalidateMappedMemoryRanges( ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> memoryRanges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, void** ppData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void*>::type mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags = MemoryMapFlags(), Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::PipelineCache* pSrcCaches, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::PipelineCache> srcCaches, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::ValidationCacheEXT* pSrcCaches, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ValidationCacheEXT> srcCaches, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT* pDeviceEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type registerEventEXT( const DeviceEventInfoEXT & deviceEventInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<Fence,Dispatch>>::type registerEventEXTUnique( const DeviceEventInfoEXT & deviceEventInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT* pDisplayEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<Fence,Dispatch>>::type registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result registerObjectsNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, uint32_t objectCount, const VULKAN_HPP_NAMESPACE::ObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type registerObjectsNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, ArrayProxy<const VULKAN_HPP_NAMESPACE::ObjectTableEntryNVX* const> pObjectTableEntries, ArrayProxy<const uint32_t> objectIndices, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#else
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#else
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void releaseProfilingLockKHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#else
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags = DescriptorPoolResetFlags(), Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#else
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags = DescriptorPoolResetFlags(), Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#else
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result resetFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence* pFences, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type resetFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> fences, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pNameInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT* pTagInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#else
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setHdrMetadataEXT( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains, const VULKAN_HPP_NAMESPACE::HdrMetadataEXT* pMetadata, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setHdrMetadataEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> swapchains, ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> metadata, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo* pSignalInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type signalSemaphore( const SemaphoreSignalInfo & signalInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo* pSignalInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type signalSemaphoreKHR( const SemaphoreSignalInfo & signalInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags = CommandPoolTrimFlags(), Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags = CommandPoolTrimFlags(), Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void uninitializePerformanceApiINTEL(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result unregisterObjectsNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, uint32_t objectCount, const VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type unregisterObjectsNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, ArrayProxy<const VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX> objectEntryTypes, ArrayProxy<const uint32_t> objectIndices, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void updateDescriptorSets( uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VULKAN_HPP_NAMESPACE::CopyDescriptorSet* pDescriptorCopies, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void updateDescriptorSets( ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> descriptorWrites, ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> descriptorCopies, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result waitForFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence* pFences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result waitForFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> fences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo* pWaitInfo, uint64_t timeout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result waitSemaphores( const SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo* pWaitInfo, uint64_t timeout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result waitSemaphoresKHR( const SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDevice() const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- return m_device;
+ pNext = pNext_;
+ return *this;
}
- explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureCreateInfoNV & setCompactedSize( VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ ) VULKAN_HPP_NOEXCEPT
{
- return m_device != VK_NULL_HANDLE;
+ compactedSize = compactedSize_;
+ return *this;
}
- bool operator!() const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureCreateInfoNV & setInfo( VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV const & info_ ) VULKAN_HPP_NOEXCEPT
{
- return m_device == VK_NULL_HANDLE;
+ info = info_;
+ return *this;
}
- private:
- VkDevice m_device;
- };
- static_assert( sizeof( Device ) == sizeof( VkDevice ), "handle and wrapper have different size!" );
-
- template <>
- struct cpp_type<ObjectType::eDevice>
- {
- using type = Device;
- };
-
- class DisplayModeKHR
- {
- public:
- using CType = VkDisplayModeKHR;
-
- static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDisplayModeKHR;
-
- public:
- VULKAN_HPP_CONSTEXPR DisplayModeKHR() VULKAN_HPP_NOEXCEPT
- : m_displayModeKHR(VK_NULL_HANDLE)
- {}
-
- VULKAN_HPP_CONSTEXPR DisplayModeKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- : m_displayModeKHR(VK_NULL_HANDLE)
- {}
- VULKAN_HPP_TYPESAFE_EXPLICIT DisplayModeKHR( VkDisplayModeKHR displayModeKHR ) VULKAN_HPP_NOEXCEPT
- : m_displayModeKHR( displayModeKHR )
- {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- DisplayModeKHR & operator=(VkDisplayModeKHR displayModeKHR) VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
{
- m_displayModeKHR = displayModeKHR;
- return *this;
+ return *reinterpret_cast<const VkAccelerationStructureCreateInfoNV*>( this );
}
-#endif
- DisplayModeKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureCreateInfoNV &() VULKAN_HPP_NOEXCEPT
{
- m_displayModeKHR = VK_NULL_HANDLE;
- return *this;
+ return *reinterpret_cast<VkAccelerationStructureCreateInfoNV*>( this );
}
- bool operator==( DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_displayModeKHR == rhs.m_displayModeKHR;
- }
- bool operator!=(DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( AccelerationStructureCreateInfoNV const& ) const = default;
+#else
+ bool operator==( AccelerationStructureCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- return m_displayModeKHR != rhs.m_displayModeKHR;
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( compactedSize == rhs.compactedSize )
+ && ( info == rhs.info );
}
- bool operator<(DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( AccelerationStructureCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- return m_displayModeKHR < rhs.m_displayModeKHR;
+ return !operator==( rhs );
}
+#endif
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDisplayModeKHR() const VULKAN_HPP_NOEXCEPT
- {
- return m_displayModeKHR;
- }
- explicit operator bool() const VULKAN_HPP_NOEXCEPT
- {
- return m_displayModeKHR != VK_NULL_HANDLE;
- }
- bool operator!() const VULKAN_HPP_NOEXCEPT
- {
- return m_displayModeKHR == VK_NULL_HANDLE;
- }
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureCreateInfoNV;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize compactedSize = {};
+ VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info = {};
- private:
- VkDisplayModeKHR m_displayModeKHR;
};
- static_assert( sizeof( DisplayModeKHR ) == sizeof( VkDisplayModeKHR ), "handle and wrapper have different size!" );
+ static_assert( sizeof( AccelerationStructureCreateInfoNV ) == sizeof( VkAccelerationStructureCreateInfoNV ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<AccelerationStructureCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
template <>
- struct cpp_type<ObjectType::eDisplayModeKHR>
+ struct CppType<StructureType, StructureType::eAccelerationStructureCreateInfoNV>
{
- using type = DisplayModeKHR;
+ using Type = AccelerationStructureCreateInfoNV;
};
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch> class UniqueHandleTraits<Device, Dispatch> { public: using deleter = ObjectDestroy<NoParent, Dispatch>; };
- using UniqueDevice = UniqueHandle<Device, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-
- class PhysicalDevice
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ struct AccelerationStructureDeviceAddressInfoKHR
{
- public:
- using CType = VkPhysicalDevice;
-
- static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::ePhysicalDevice;
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureDeviceAddressInfoKHR;
- public:
- VULKAN_HPP_CONSTEXPR PhysicalDevice() VULKAN_HPP_NOEXCEPT
- : m_physicalDevice(VK_NULL_HANDLE)
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR AccelerationStructureDeviceAddressInfoKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ = {}) VULKAN_HPP_NOEXCEPT
+ : accelerationStructure( accelerationStructure_ )
{}
- VULKAN_HPP_CONSTEXPR PhysicalDevice( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- : m_physicalDevice(VK_NULL_HANDLE)
- {}
+ VULKAN_HPP_CONSTEXPR AccelerationStructureDeviceAddressInfoKHR( AccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VULKAN_HPP_TYPESAFE_EXPLICIT PhysicalDevice( VkPhysicalDevice physicalDevice ) VULKAN_HPP_NOEXCEPT
- : m_physicalDevice( physicalDevice )
- {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- PhysicalDevice & operator=(VkPhysicalDevice physicalDevice) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureDeviceAddressInfoKHR( VkAccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- m_physicalDevice = physicalDevice;
- return *this;
+ *this = rhs;
}
-#endif
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDevice & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureDeviceAddressInfoKHR & operator=( VkAccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- m_physicalDevice = VK_NULL_HANDLE;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR const *>( &rhs );
return *this;
}
- bool operator==( PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureDeviceAddressInfoKHR & operator=( AccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- return m_physicalDevice == rhs.m_physicalDevice;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( AccelerationStructureDeviceAddressInfoKHR ) );
+ return *this;
}
- bool operator!=(PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureDeviceAddressInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- return m_physicalDevice != rhs.m_physicalDevice;
+ pNext = pNext_;
+ return *this;
}
- bool operator<(PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureDeviceAddressInfoKHR & setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
{
- return m_physicalDevice < rhs.m_physicalDevice;
+ accelerationStructure = accelerationStructure_;
+ return *this;
}
-#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result acquireXlibDisplayEXT( Display* dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<Display>::type acquireXlibDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Device* pDevice, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::Device>::type createDevice( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<Device,Dispatch>>::type createDeviceUnique( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DisplayModeKHR* pMode, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayModeKHR>::type createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayModeCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result enumerateDeviceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator = std::allocator<ExtensionProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateDeviceExtensionProperties( Optional<const std::string> layerName = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<ExtensionProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateDeviceExtensionProperties( Optional<const std::string> layerName, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result enumerateDeviceLayerProperties( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator = std::allocator<LayerProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateDeviceLayerProperties(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<LayerProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateDeviceLayerProperties(Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, uint32_t* pCounterCount, VULKAN_HPP_NAMESPACE::PerformanceCounterKHR* pCounters, VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR* pCounterDescriptions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator = std::allocator<PerformanceCounterDescriptionKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<PerformanceCounterDescriptionKHR,Allocator>>::type enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, ArrayProxy<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR> counters, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<PerformanceCounterDescriptionKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<PerformanceCounterDescriptionKHR,Allocator>>::type enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, ArrayProxy<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR> counters, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator = std::allocator<DisplayModeProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<DisplayModeProperties2KHR,Allocator>>::type getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<DisplayModeProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<DisplayModeProperties2KHR,Allocator>>::type getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator = std::allocator<DisplayModePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<DisplayModePropertiesKHR,Allocator>>::type getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<DisplayModePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<DisplayModePropertiesKHR,Allocator>>::type getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR* pDisplayPlaneInfo, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR* pCapabilities, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR>::type getDisplayPlaneCapabilities2KHR( const DisplayPlaneInfo2KHR & displayPlaneInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR* pCapabilities, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR>::type getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, uint32_t* pDisplayCount, VULKAN_HPP_NAMESPACE::DisplayKHR* pDisplays, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator = std::allocator<DisplayKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<DisplayKHR,Allocator>>::type getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<DisplayKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<DisplayKHR,Allocator>>::type getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getCalibrateableTimeDomainsEXT( uint32_t* pTimeDomainCount, VULKAN_HPP_NAMESPACE::TimeDomainEXT* pTimeDomains, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator = std::allocator<TimeDomainEXT>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<TimeDomainEXT,Allocator>>::type getCalibrateableTimeDomainsEXT(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<TimeDomainEXT>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<TimeDomainEXT,Allocator>>::type getCalibrateableTimeDomainsEXT(Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getCooperativeMatrixPropertiesNV( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator = std::allocator<CooperativeMatrixPropertiesNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<CooperativeMatrixPropertiesNV,Allocator>>::type getCooperativeMatrixPropertiesNV(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<CooperativeMatrixPropertiesNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<CooperativeMatrixPropertiesNV,Allocator>>::type getCooperativeMatrixPropertiesNV(Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getDisplayPlaneProperties2KHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator = std::allocator<DisplayPlaneProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<DisplayPlaneProperties2KHR,Allocator>>::type getDisplayPlaneProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<DisplayPlaneProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<DisplayPlaneProperties2KHR,Allocator>>::type getDisplayPlaneProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getDisplayPlanePropertiesKHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator = std::allocator<DisplayPlanePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<DisplayPlanePropertiesKHR,Allocator>>::type getDisplayPlanePropertiesKHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<DisplayPlanePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<DisplayPlanePropertiesKHR,Allocator>>::type getDisplayPlanePropertiesKHR(Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getDisplayProperties2KHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayProperties2KHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator = std::allocator<DisplayProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<DisplayProperties2KHR,Allocator>>::type getDisplayProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<DisplayProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<DisplayProperties2KHR,Allocator>>::type getDisplayProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getDisplayPropertiesKHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator = std::allocator<DisplayPropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<DisplayPropertiesKHR,Allocator>>::type getDisplayPropertiesKHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<DisplayPropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<DisplayPropertiesKHR,Allocator>>::type getDisplayPropertiesKHR(Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VULKAN_HPP_NAMESPACE::ExternalBufferProperties* pExternalBufferProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferProperties( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VULKAN_HPP_NAMESPACE::ExternalBufferProperties* pExternalBufferProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferPropertiesKHR( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VULKAN_HPP_NAMESPACE::ExternalFenceProperties* pExternalFenceProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFenceProperties( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VULKAN_HPP_NAMESPACE::ExternalFenceProperties* pExternalFenceProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFencePropertiesKHR( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV* pExternalImageFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV>::type getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphoreProperties( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphorePropertiesKHR( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pFeatures, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures getFeatures(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2* pFeatures, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 getFeatures2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
- template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- StructureChain<X, Y, Z...> getFeatures2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getFeatures2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2* pFeatures, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 getFeatures2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
- template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- StructureChain<X, Y, Z...> getFeatures2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties* pFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NAMESPACE::FormatProperties getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties2* pFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NAMESPACE::FormatProperties2 getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
- template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- StructureChain<X, Y, Z...> getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties2* pFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NAMESPACE::FormatProperties2 getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
- template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- StructureChain<X, Y, Z...> getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getGeneratedCommandsPropertiesNVX( VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsFeaturesNVX* pFeatures, VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsLimitsNVX* pLimits, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsLimitsNVX getGeneratedCommandsPropertiesNVX( DeviceGeneratedCommandsFeaturesNVX & features, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ImageFormatProperties* pImageFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties>::type getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2* pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2* pImageFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<StructureChain<X, Y, Z...>>::type getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2* pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2* pImageFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<StructureChain<X, Y, Z...>>::type getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties* pMemoryProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties getMemoryProperties(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 getMemoryProperties2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
- template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- StructureChain<X, Y, Z...> getMemoryProperties2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 getMemoryProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
- template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- StructureChain<X, Y, Z...> getMemoryProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT* pMultisampleProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pRectCount, VULKAN_HPP_NAMESPACE::Rect2D* pRects, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator = std::allocator<Rect2D>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<Rect2D,Allocator>>::type getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<Rect2D>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<Rect2D,Allocator>>::type getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties getProperties(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 getProperties2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
- template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- StructureChain<X, Y, Z...> getProperties2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 getProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
- template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- StructureChain<X, Y, Z...> getProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getQueueFamilyPerformanceQueryPassesKHR( const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, uint32_t* pNumPasses, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- uint32_t getQueueFamilyPerformanceQueryPassesKHR( const QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getQueueFamilyProperties( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties* pQueueFamilyProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator = std::allocator<QueueFamilyProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- std::vector<QueueFamilyProperties,Allocator> getQueueFamilyProperties(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<QueueFamilyProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- std::vector<QueueFamilyProperties,Allocator> getQueueFamilyProperties(Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getQueueFamilyProperties2( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator = std::allocator<QueueFamilyProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- std::vector<QueueFamilyProperties2,Allocator> getQueueFamilyProperties2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<QueueFamilyProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- std::vector<QueueFamilyProperties2,Allocator> getQueueFamilyProperties2(Allocator const& vectorAllocator, Dispatch const &d ) const;
- template<typename StructureChain, typename Allocator = std::allocator<StructureChain>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- std::vector<StructureChain,Allocator> getQueueFamilyProperties2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename StructureChain, typename Allocator = std::allocator<StructureChain>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- std::vector<StructureChain,Allocator> getQueueFamilyProperties2(Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getQueueFamilyProperties2KHR( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator = std::allocator<QueueFamilyProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- std::vector<QueueFamilyProperties2,Allocator> getQueueFamilyProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<QueueFamilyProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- std::vector<QueueFamilyProperties2,Allocator> getQueueFamilyProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const;
- template<typename StructureChain, typename Allocator = std::allocator<StructureChain>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- std::vector<StructureChain,Allocator> getQueueFamilyProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename StructureChain, typename Allocator = std::allocator<StructureChain>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- std::vector<StructureChain,Allocator> getQueueFamilyProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator = std::allocator<SparseImageFormatProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- std::vector<SparseImageFormatProperties,Allocator> getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<SparseImageFormatProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- std::vector<SparseImageFormatProperties,Allocator> getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator = std::allocator<SparseImageFormatProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- std::vector<SparseImageFormatProperties2,Allocator> getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<SparseImageFormatProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- std::vector<SparseImageFormatProperties2,Allocator> getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator = std::allocator<SparseImageFormatProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- std::vector<SparseImageFormatProperties2,Allocator> getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<SparseImageFormatProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- std::vector<SparseImageFormatProperties2,Allocator> getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getSupportedFramebufferMixedSamplesCombinationsNV( uint32_t* pCombinationCount, VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV* pCombinations, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator = std::allocator<FramebufferMixedSamplesCombinationNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<FramebufferMixedSamplesCombinationNV,Allocator>>::type getSupportedFramebufferMixedSamplesCombinationsNV(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<FramebufferMixedSamplesCombinationNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<FramebufferMixedSamplesCombinationNV,Allocator>>::type getSupportedFramebufferMixedSamplesCombinationsNV(Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT* pSurfaceCapabilities, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT>::type getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR* pSurfaceCapabilities, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>::type getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<StructureChain<X, Y, Z...>>::type getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR* pSurfaceCapabilities, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR>::type getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR* pSurfaceFormats, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator = std::allocator<SurfaceFormat2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<SurfaceFormat2KHR,Allocator>>::type getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<SurfaceFormat2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<SurfaceFormat2KHR,Allocator>>::type getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormatKHR* pSurfaceFormats, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator = std::allocator<SurfaceFormatKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<SurfaceFormatKHR,Allocator>>::type getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<SurfaceFormatKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<SurfaceFormatKHR,Allocator>>::type getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR* pPresentModes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator = std::allocator<PresentModeKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<PresentModeKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR* pPresentModes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator = std::allocator<PresentModeKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<PresentModeKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::Bool32* pSupported, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::Bool32>::type getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getToolPropertiesEXT( uint32_t* pToolCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT* pToolProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator = std::allocator<PhysicalDeviceToolPropertiesEXT>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<PhysicalDeviceToolPropertiesEXT,Allocator>>::type getToolPropertiesEXT(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<PhysicalDeviceToolPropertiesEXT>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<PhysicalDeviceToolPropertiesEXT,Allocator>>::type getToolPropertiesEXT(Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VK_USE_PLATFORM_WAYLAND_KHR
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display* display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Bool32 getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
-#ifdef VK_USE_PLATFORM_XCB_KHR
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t visual_id, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_XCB_KHR*/
+ operator VkAccelerationStructureDeviceAddressInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR*>( this );
+ }
-#ifdef VK_USE_PLATFORM_XLIB_KHR
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display* dpy, VisualID visualID, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+ operator VkAccelerationStructureDeviceAddressInfoKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkAccelerationStructureDeviceAddressInfoKHR*>( this );
+ }
-#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getRandROutputDisplayEXT( Display* dpy, RROutput rrOutput, VULKAN_HPP_NAMESPACE::DisplayKHR* pDisplay, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( AccelerationStructureDeviceAddressInfoKHR const& ) const = default;
#else
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPhysicalDevice() const VULKAN_HPP_NOEXCEPT
+ bool operator==( AccelerationStructureDeviceAddressInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- return m_physicalDevice;
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( accelerationStructure == rhs.accelerationStructure );
}
- explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ bool operator!=( AccelerationStructureDeviceAddressInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- return m_physicalDevice != VK_NULL_HANDLE;
+ return !operator==( rhs );
}
+#endif
- bool operator!() const VULKAN_HPP_NOEXCEPT
- {
- return m_physicalDevice == VK_NULL_HANDLE;
- }
- private:
- VkPhysicalDevice m_physicalDevice;
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureDeviceAddressInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure = {};
+
};
- static_assert( sizeof( PhysicalDevice ) == sizeof( VkPhysicalDevice ), "handle and wrapper have different size!" );
+ static_assert( sizeof( AccelerationStructureDeviceAddressInfoKHR ) == sizeof( VkAccelerationStructureDeviceAddressInfoKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<AccelerationStructureDeviceAddressInfoKHR>::value, "struct wrapper is not a standard layout!" );
template <>
- struct cpp_type<ObjectType::ePhysicalDevice>
+ struct CppType<StructureType, StructureType::eAccelerationStructureDeviceAddressInfoKHR>
{
- using type = PhysicalDevice;
+ using Type = AccelerationStructureDeviceAddressInfoKHR;
};
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
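// --- Illustrative sketch (editor's addition, not part of vulkan.hpp or this diff) ---
// The hunk above introduces the beta-extension wrapper
// vk::AccelerationStructureDeviceAddressInfoKHR. A minimal way to fill it before
// querying an acceleration structure's device address, assuming VK_ENABLE_BETA_EXTENSIONS
// is defined, this commit's <vulkan/vulkan.hpp> is included, and the generated default
// constructor (not shown in this hunk) follows the usual pattern. The handle passed in is
// an assumed, previously created object; the address query call itself is not shown here.
#include <vulkan/vulkan.hpp>

vk::AccelerationStructureDeviceAddressInfoKHR
makeAddressInfo( vk::AccelerationStructureKHR accelerationStructure )
{
  vk::AccelerationStructureDeviceAddressInfoKHR info{};
  // sType and pNext are pre-set const members; only the handle needs filling.
  info.accelerationStructure = accelerationStructure;
  return info;
}
// --- end sketch ---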
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- class Instance;
- template <typename Dispatch> class UniqueHandleTraits<DebugReportCallbackEXT, Dispatch> { public: using deleter = ObjectDestroy<Instance, Dispatch>; };
- using UniqueDebugReportCallbackEXT = UniqueHandle<DebugReportCallbackEXT, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
- template <typename Dispatch> class UniqueHandleTraits<DebugUtilsMessengerEXT, Dispatch> { public: using deleter = ObjectDestroy<Instance, Dispatch>; };
- using UniqueDebugUtilsMessengerEXT = UniqueHandle<DebugUtilsMessengerEXT, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
- template <typename Dispatch> class UniqueHandleTraits<SurfaceKHR, Dispatch> { public: using deleter = ObjectDestroy<Instance, Dispatch>; };
- using UniqueSurfaceKHR = UniqueHandle<SurfaceKHR, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-
- class Instance
+ struct TransformMatrixKHR
{
- public:
- using CType = VkInstance;
- static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eInstance;
- public:
- VULKAN_HPP_CONSTEXPR Instance() VULKAN_HPP_NOEXCEPT
- : m_instance(VK_NULL_HANDLE)
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR(std::array<std::array<float,4>,3> const& matrix_ = {}) VULKAN_HPP_NOEXCEPT
+ : matrix( matrix_ )
{}
- VULKAN_HPP_CONSTEXPR Instance( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- : m_instance(VK_NULL_HANDLE)
- {}
+ VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR( TransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VULKAN_HPP_TYPESAFE_EXPLICIT Instance( VkInstance instance ) VULKAN_HPP_NOEXCEPT
- : m_instance( instance )
- {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- Instance & operator=(VkInstance instance) VULKAN_HPP_NOEXCEPT
+ TransformMatrixKHR( VkTransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- m_instance = instance;
- return *this;
+ *this = rhs;
}
-#endif
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- Instance & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ TransformMatrixKHR & operator=( VkTransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- m_instance = VK_NULL_HANDLE;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TransformMatrixKHR const *>( &rhs );
return *this;
}
- bool operator==( Instance const & rhs ) const VULKAN_HPP_NOEXCEPT
+ TransformMatrixKHR & operator=( TransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- return m_instance == rhs.m_instance;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( TransformMatrixKHR ) );
+ return *this;
}
- bool operator!=(Instance const & rhs ) const VULKAN_HPP_NOEXCEPT
+ TransformMatrixKHR & setMatrix( std::array<std::array<float,4>,3> matrix_ ) VULKAN_HPP_NOEXCEPT
{
- return m_instance != rhs.m_instance;
+ matrix = matrix_;
+ return *this;
}
- bool operator<(Instance const & rhs ) const VULKAN_HPP_NOEXCEPT
+
+ operator VkTransformMatrixKHR const&() const VULKAN_HPP_NOEXCEPT
{
- return m_instance < rhs.m_instance;
+ return *reinterpret_cast<const VkTransformMatrixKHR*>( this );
}
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createAndroidSurfaceKHRUnique( const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT* pCallback, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>::type createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<DebugReportCallbackEXT,Dispatch>>::type createDebugReportCallbackEXTUnique( const DebugReportCallbackCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT* pMessenger, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>::type createDebugUtilsMessengerEXT( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<DebugUtilsMessengerEXT,Dispatch>>::type createDebugUtilsMessengerEXTUnique( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createDisplayPlaneSurfaceKHRUnique( const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createHeadlessSurfaceEXT( const HeadlessSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createHeadlessSurfaceEXTUnique( const HeadlessSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VK_USE_PLATFORM_IOS_MVK
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createIOSSurfaceMVKUnique( const IOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_IOS_MVK*/
-
-#ifdef VK_USE_PLATFORM_FUCHSIA
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createImagePipeSurfaceFUCHSIA( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createImagePipeSurfaceFUCHSIAUnique( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_FUCHSIA*/
-
-#ifdef VK_USE_PLATFORM_MACOS_MVK
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createMacOSSurfaceMVKUnique( const MacOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_MACOS_MVK*/
-
-#ifdef VK_USE_PLATFORM_METAL_EXT
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createMetalSurfaceEXT( const MetalSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createMetalSurfaceEXTUnique( const MetalSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_METAL_EXT*/
-
-#ifdef VK_USE_PLATFORM_GGP
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createStreamDescriptorSurfaceGGP( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createStreamDescriptorSurfaceGGPUnique( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_GGP*/
-
-#ifdef VK_USE_PLATFORM_VI_NN
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createViSurfaceNN( const ViSurfaceCreateInfoNN & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createViSurfaceNNUnique( const ViSurfaceCreateInfoNN & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_VI_NN*/
-
-#ifdef VK_USE_PLATFORM_WAYLAND_KHR
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createWaylandSurfaceKHRUnique( const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createWin32SurfaceKHRUnique( const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
-#ifdef VK_USE_PLATFORM_XCB_KHR
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createXcbSurfaceKHRUnique( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_XCB_KHR*/
-
-#ifdef VK_USE_PLATFORM_XLIB_KHR
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createXlibSurfaceKHRUnique( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_XLIB_KHR*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const std::string & layerPrefix, const std::string & message, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result enumeratePhysicalDeviceGroups( uint32_t* pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator = std::allocator<PhysicalDeviceGroupProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type enumeratePhysicalDeviceGroups(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<PhysicalDeviceGroupProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type enumeratePhysicalDeviceGroups(Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result enumeratePhysicalDeviceGroupsKHR( uint32_t* pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator = std::allocator<PhysicalDeviceGroupProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type enumeratePhysicalDeviceGroupsKHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<PhysicalDeviceGroupProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type enumeratePhysicalDeviceGroupsKHR(Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result enumeratePhysicalDevices( uint32_t* pPhysicalDeviceCount, VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator = std::allocator<PhysicalDevice>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<PhysicalDevice,Allocator>>::type enumeratePhysicalDevices(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
- template<typename Allocator = std::allocator<PhysicalDevice>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<PhysicalDevice,Allocator>>::type enumeratePhysicalDevices(Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- PFN_vkVoidFunction getProcAddr( const char* pName, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- PFN_vkVoidFunction getProcAddr( const std::string & name, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT* pCallbackData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const DebugUtilsMessengerCallbackDataEXT & callbackData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkInstance() const VULKAN_HPP_NOEXCEPT
+ operator VkTransformMatrixKHR &() VULKAN_HPP_NOEXCEPT
{
- return m_instance;
+ return *reinterpret_cast<VkTransformMatrixKHR*>( this );
}
- explicit operator bool() const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( TransformMatrixKHR const& ) const = default;
+#else
+ bool operator==( TransformMatrixKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- return m_instance != VK_NULL_HANDLE;
+ return ( matrix == rhs.matrix );
}
- bool operator!() const VULKAN_HPP_NOEXCEPT
+ bool operator!=( TransformMatrixKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- return m_instance == VK_NULL_HANDLE;
+ return !operator==( rhs );
}
+#endif
- private:
- VkInstance m_instance;
- };
- static_assert( sizeof( Instance ) == sizeof( VkInstance ), "handle and wrapper have different size!" );
- template <>
- struct cpp_type<ObjectType::eInstance>
- {
- using type = Instance;
- };
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch> class UniqueHandleTraits<Instance, Dispatch> { public: using deleter = ObjectDestroy<NoParent, Dispatch>; };
- using UniqueInstance = UniqueHandle<Instance, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Instance* pInstance, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::Instance>::type createInstance( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER );
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<Instance,Dispatch>>::type createInstanceUnique( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER );
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ public:
+ VULKAN_HPP_NAMESPACE::ArrayWrapper2D<float, 3, 4> matrix = {};
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result enumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator = std::allocator<ExtensionProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER );
- template<typename Allocator = std::allocator<ExtensionProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName, Allocator const& vectorAllocator, Dispatch const &d );
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ };
+ static_assert( sizeof( TransformMatrixKHR ) == sizeof( VkTransformMatrixKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<TransformMatrixKHR>::value, "struct wrapper is not a standard layout!" );
+ using TransformMatrixNV = TransformMatrixKHR;
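// --- Illustrative sketch (editor's addition, not part of vulkan.hpp or this diff) ---
// vk::TransformMatrixKHR wraps the 3x4 row-major matrix used by the ray-tracing
// structs; building an identity object-to-world transform with the constructor
// declared above. Only the include path is assumed.
#include <array>
#include <vulkan/vulkan.hpp>

vk::TransformMatrixKHR identityTransform()
{
  std::array<std::array<float, 4>, 3> rows = { {
    { 1.0f, 0.0f, 0.0f, 0.0f },   // row 0
    { 0.0f, 1.0f, 0.0f, 0.0f },   // row 1
    { 0.0f, 0.0f, 1.0f, 0.0f }    // row 2; the fourth column holds the translation
  } };
  return vk::TransformMatrixKHR( rows );
}
// --- end sketch ---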
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result enumerateInstanceLayerProperties( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator = std::allocator<LayerProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateInstanceLayerProperties(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER );
- template<typename Allocator = std::allocator<LayerProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateInstanceLayerProperties(Allocator const& vectorAllocator, Dispatch const &d );
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ struct AccelerationStructureInstanceKHR
+ {
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result enumerateInstanceVersion( uint32_t* pApiVersion, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<uint32_t>::type enumerateInstanceVersion(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER );
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- struct GeometryTrianglesNV
- {
- VULKAN_HPP_CONSTEXPR GeometryTrianglesNV( VULKAN_HPP_NAMESPACE::Buffer vertexData_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset_ = {},
- uint32_t vertexCount_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ = {},
- VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
- VULKAN_HPP_NAMESPACE::Buffer indexData_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize indexOffset_ = {},
- uint32_t indexCount_ = {},
- VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16,
- VULKAN_HPP_NAMESPACE::Buffer transformData_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize transformOffset_ = {} ) VULKAN_HPP_NOEXCEPT
- : vertexData( vertexData_ )
- , vertexOffset( vertexOffset_ )
- , vertexCount( vertexCount_ )
- , vertexStride( vertexStride_ )
- , vertexFormat( vertexFormat_ )
- , indexData( indexData_ )
- , indexOffset( indexOffset_ )
- , indexCount( indexCount_ )
- , indexType( indexType_ )
- , transformData( transformData_ )
- , transformOffset( transformOffset_ )
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR(VULKAN_HPP_NAMESPACE::TransformMatrixKHR transform_ = {}, uint32_t instanceCustomIndex_ = {}, uint32_t mask_ = {}, uint32_t instanceShaderBindingTableRecordOffset_ = {}, VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ = {}, uint64_t accelerationStructureReference_ = {}) VULKAN_HPP_NOEXCEPT
+ : transform( transform_ ), instanceCustomIndex( instanceCustomIndex_ ), mask( mask_ ), instanceShaderBindingTableRecordOffset( instanceShaderBindingTableRecordOffset_ ), flags( flags_ ), accelerationStructureReference( accelerationStructureReference_ )
{}
- VULKAN_HPP_NAMESPACE::GeometryTrianglesNV & operator=( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV ) - offsetof( GeometryTrianglesNV, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR( AccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- GeometryTrianglesNV( VkGeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureInstanceKHR( VkAccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- GeometryTrianglesNV& operator=( VkGeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureInstanceKHR & operator=( VkAccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryTrianglesNV const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR const *>( &rhs );
return *this;
}
- GeometryTrianglesNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureInstanceKHR & operator=( AccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( AccelerationStructureInstanceKHR ) );
return *this;
}
- GeometryTrianglesNV & setVertexData( VULKAN_HPP_NAMESPACE::Buffer vertexData_ ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureInstanceKHR & setTransform( VULKAN_HPP_NAMESPACE::TransformMatrixKHR const & transform_ ) VULKAN_HPP_NOEXCEPT
{
- vertexData = vertexData_;
+ transform = transform_;
return *this;
}
- GeometryTrianglesNV & setVertexOffset( VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset_ ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureInstanceKHR & setInstanceCustomIndex( uint32_t instanceCustomIndex_ ) VULKAN_HPP_NOEXCEPT
{
- vertexOffset = vertexOffset_;
+ instanceCustomIndex = instanceCustomIndex_;
return *this;
}
- GeometryTrianglesNV & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureInstanceKHR & setMask( uint32_t mask_ ) VULKAN_HPP_NOEXCEPT
{
- vertexCount = vertexCount_;
+ mask = mask_;
return *this;
}
- GeometryTrianglesNV & setVertexStride( VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureInstanceKHR & setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) VULKAN_HPP_NOEXCEPT
{
- vertexStride = vertexStride_;
+ instanceShaderBindingTableRecordOffset = instanceShaderBindingTableRecordOffset_;
return *this;
}
- GeometryTrianglesNV & setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureInstanceKHR & setFlags( VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
- vertexFormat = vertexFormat_;
+ flags = *reinterpret_cast<VkGeometryInstanceFlagsKHR*>(&flags_);
return *this;
}
- GeometryTrianglesNV & setIndexData( VULKAN_HPP_NAMESPACE::Buffer indexData_ ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureInstanceKHR & setAccelerationStructureReference( uint64_t accelerationStructureReference_ ) VULKAN_HPP_NOEXCEPT
{
- indexData = indexData_;
+ accelerationStructureReference = accelerationStructureReference_;
return *this;
}
- GeometryTrianglesNV & setIndexOffset( VULKAN_HPP_NAMESPACE::DeviceSize indexOffset_ ) VULKAN_HPP_NOEXCEPT
- {
- indexOffset = indexOffset_;
- return *this;
- }
- GeometryTrianglesNV & setIndexCount( uint32_t indexCount_ ) VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureInstanceKHR const&() const VULKAN_HPP_NOEXCEPT
{
- indexCount = indexCount_;
- return *this;
+ return *reinterpret_cast<const VkAccelerationStructureInstanceKHR*>( this );
}
- GeometryTrianglesNV & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureInstanceKHR &() VULKAN_HPP_NOEXCEPT
{
- indexType = indexType_;
- return *this;
+ return *reinterpret_cast<VkAccelerationStructureInstanceKHR*>( this );
}
- GeometryTrianglesNV & setTransformData( VULKAN_HPP_NAMESPACE::Buffer transformData_ ) VULKAN_HPP_NOEXCEPT
- {
- transformData = transformData_;
- return *this;
- }
- GeometryTrianglesNV & setTransformOffset( VULKAN_HPP_NAMESPACE::DeviceSize transformOffset_ ) VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( AccelerationStructureInstanceKHR const& ) const = default;
+#else
+ bool operator==( AccelerationStructureInstanceKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- transformOffset = transformOffset_;
- return *this;
+ return ( transform == rhs.transform )
+ && ( instanceCustomIndex == rhs.instanceCustomIndex )
+ && ( mask == rhs.mask )
+ && ( instanceShaderBindingTableRecordOffset == rhs.instanceShaderBindingTableRecordOffset )
+ && ( flags == rhs.flags )
+ && ( accelerationStructureReference == rhs.accelerationStructureReference );
}
- operator VkGeometryTrianglesNV const&() const VULKAN_HPP_NOEXCEPT
+ bool operator!=( AccelerationStructureInstanceKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkGeometryTrianglesNV*>( this );
+ return !operator==( rhs );
}
+#endif
- operator VkGeometryTrianglesNV &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkGeometryTrianglesNV*>( this );
- }
- bool operator==( GeometryTrianglesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( vertexData == rhs.vertexData )
- && ( vertexOffset == rhs.vertexOffset )
- && ( vertexCount == rhs.vertexCount )
- && ( vertexStride == rhs.vertexStride )
- && ( vertexFormat == rhs.vertexFormat )
- && ( indexData == rhs.indexData )
- && ( indexOffset == rhs.indexOffset )
- && ( indexCount == rhs.indexCount )
- && ( indexType == rhs.indexType )
- && ( transformData == rhs.transformData )
- && ( transformOffset == rhs.transformOffset );
- }
-
- bool operator!=( GeometryTrianglesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryTrianglesNV;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::Buffer vertexData = {};
- VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset = {};
- uint32_t vertexCount = {};
- VULKAN_HPP_NAMESPACE::DeviceSize vertexStride = {};
- VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
- VULKAN_HPP_NAMESPACE::Buffer indexData = {};
- VULKAN_HPP_NAMESPACE::DeviceSize indexOffset = {};
- uint32_t indexCount = {};
- VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16;
- VULKAN_HPP_NAMESPACE::Buffer transformData = {};
- VULKAN_HPP_NAMESPACE::DeviceSize transformOffset = {};
+ VULKAN_HPP_NAMESPACE::TransformMatrixKHR transform = {};
+ uint32_t instanceCustomIndex : 24;
+ uint32_t mask : 8;
+ uint32_t instanceShaderBindingTableRecordOffset : 24;
+ VkGeometryInstanceFlagsKHR flags : 8;
+ uint64_t accelerationStructureReference = {};
+
};
- static_assert( sizeof( GeometryTrianglesNV ) == sizeof( VkGeometryTrianglesNV ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<GeometryTrianglesNV>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( AccelerationStructureInstanceKHR ) == sizeof( VkAccelerationStructureInstanceKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<AccelerationStructureInstanceKHR>::value, "struct wrapper is not a standard layout!" );
+ using AccelerationStructureInstanceNV = AccelerationStructureInstanceKHR;
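// --- Illustrative sketch (editor's addition, not part of vulkan.hpp or this diff) ---
// vk::AccelerationStructureInstanceKHR packs one top-level-AS instance record,
// including the 24/8-bit bitfields. This fills it via the setters declared above;
// 'blasAddress' is an assumed device address of a previously built bottom-level
// acceleration structure, and the cull-disable flag bit is an assumed example value.
#include <cstdint>
#include <vulkan/vulkan.hpp>

vk::AccelerationStructureInstanceKHR
makeInstance( vk::TransformMatrixKHR const & transform, uint64_t blasAddress )
{
  return vk::AccelerationStructureInstanceKHR{}
    .setTransform( transform )
    .setInstanceCustomIndex( 0 )                       // 24-bit value readable in shaders
    .setMask( 0xFF )                                   // visible to every ray mask
    .setInstanceShaderBindingTableRecordOffset( 0 )
    .setFlags( vk::GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable )
    .setAccelerationStructureReference( blasAddress );
}
// --- end sketch ---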
- struct GeometryAABBNV
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ struct AccelerationStructureMemoryRequirementsInfoKHR
{
- VULKAN_HPP_CONSTEXPR GeometryAABBNV( VULKAN_HPP_NAMESPACE::Buffer aabbData_ = {},
- uint32_t numAABBs_ = {},
- uint32_t stride_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {} ) VULKAN_HPP_NOEXCEPT
- : aabbData( aabbData_ )
- , numAABBs( numAABBs_ )
- , stride( stride_ )
- , offset( offset_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureMemoryRequirementsInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeKHR type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeKHR::eObject, VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType_ = VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR::eHost, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ = {}) VULKAN_HPP_NOEXCEPT
+ : type( type_ ), buildType( buildType_ ), accelerationStructure( accelerationStructure_ )
{}
- VULKAN_HPP_NAMESPACE::GeometryAABBNV & operator=( VULKAN_HPP_NAMESPACE::GeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::GeometryAABBNV ) - offsetof( GeometryAABBNV, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoKHR( AccelerationStructureMemoryRequirementsInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- GeometryAABBNV( VkGeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureMemoryRequirementsInfoKHR( VkAccelerationStructureMemoryRequirementsInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- GeometryAABBNV& operator=( VkGeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureMemoryRequirementsInfoKHR & operator=( VkAccelerationStructureMemoryRequirementsInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryAABBNV const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoKHR const *>( &rhs );
return *this;
}
- GeometryAABBNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureMemoryRequirementsInfoKHR & operator=( AccelerationStructureMemoryRequirementsInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( AccelerationStructureMemoryRequirementsInfoKHR ) );
return *this;
}
- GeometryAABBNV & setAabbData( VULKAN_HPP_NAMESPACE::Buffer aabbData_ ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureMemoryRequirementsInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- aabbData = aabbData_;
+ pNext = pNext_;
return *this;
}
- GeometryAABBNV & setNumAABBs( uint32_t numAABBs_ ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureMemoryRequirementsInfoKHR & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeKHR type_ ) VULKAN_HPP_NOEXCEPT
{
- numAABBs = numAABBs_;
+ type = type_;
return *this;
}
- GeometryAABBNV & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureMemoryRequirementsInfoKHR & setBuildType( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType_ ) VULKAN_HPP_NOEXCEPT
{
- stride = stride_;
+ buildType = buildType_;
return *this;
}
- GeometryAABBNV & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureMemoryRequirementsInfoKHR & setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
{
- offset = offset_;
+ accelerationStructure = accelerationStructure_;
return *this;
}
- operator VkGeometryAABBNV const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkAccelerationStructureMemoryRequirementsInfoKHR const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkGeometryAABBNV*>( this );
+ return *reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoKHR*>( this );
}
- operator VkGeometryAABBNV &() VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureMemoryRequirementsInfoKHR &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkGeometryAABBNV*>( this );
+ return *reinterpret_cast<VkAccelerationStructureMemoryRequirementsInfoKHR*>( this );
}
- bool operator==( GeometryAABBNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( AccelerationStructureMemoryRequirementsInfoKHR const& ) const = default;
+#else
+ bool operator==( AccelerationStructureMemoryRequirementsInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( aabbData == rhs.aabbData )
- && ( numAABBs == rhs.numAABBs )
- && ( stride == rhs.stride )
- && ( offset == rhs.offset );
+ && ( type == rhs.type )
+ && ( buildType == rhs.buildType )
+ && ( accelerationStructure == rhs.accelerationStructure );
}
- bool operator!=( GeometryAABBNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( AccelerationStructureMemoryRequirementsInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryAabbNV;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureMemoryRequirementsInfoKHR;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::Buffer aabbData = {};
- uint32_t numAABBs = {};
- uint32_t stride = {};
- VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+ VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeKHR type = VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeKHR::eObject;
+ VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType = VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR::eHost;
+ VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure = {};
+
};
- static_assert( sizeof( GeometryAABBNV ) == sizeof( VkGeometryAABBNV ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<GeometryAABBNV>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( AccelerationStructureMemoryRequirementsInfoKHR ) == sizeof( VkAccelerationStructureMemoryRequirementsInfoKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<AccelerationStructureMemoryRequirementsInfoKHR>::value, "struct wrapper is not a standard layout!" );
- struct GeometryDataNV
+ template <>
+ struct CppType<StructureType, StructureType::eAccelerationStructureMemoryRequirementsInfoKHR>
{
- VULKAN_HPP_CONSTEXPR GeometryDataNV( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV triangles_ = {},
- VULKAN_HPP_NAMESPACE::GeometryAABBNV aabbs_ = {} ) VULKAN_HPP_NOEXCEPT
- : triangles( triangles_ )
- , aabbs( aabbs_ )
+ using Type = AccelerationStructureMemoryRequirementsInfoKHR;
+ };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
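// --- Illustrative sketch (editor's addition, not part of vulkan.hpp or this diff) ---
// The beta KHR memory-requirements info mirrors the NV struct that follows. A short
// example of describing a scratch-memory query for a host build, using the setters
// declared above; the eBuildScratch requirement type is an assumed enum value not
// shown in this hunk, and 'accelerationStructure' is a placeholder handle.
#include <vulkan/vulkan.hpp>

vk::AccelerationStructureMemoryRequirementsInfoKHR
makeScratchRequirementsInfo( vk::AccelerationStructureKHR accelerationStructure )
{
  return vk::AccelerationStructureMemoryRequirementsInfoKHR{}
    .setType( vk::AccelerationStructureMemoryRequirementsTypeKHR::eBuildScratch )
    .setBuildType( vk::AccelerationStructureBuildTypeKHR::eHost )
    .setAccelerationStructure( accelerationStructure );
}
// --- end sketch ---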
+
+ struct AccelerationStructureMemoryRequirementsInfoNV
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureMemoryRequirementsInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoNV(VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ = {}) VULKAN_HPP_NOEXCEPT
+ : type( type_ ), accelerationStructure( accelerationStructure_ )
{}
- GeometryDataNV( VkGeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoNV( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ AccelerationStructureMemoryRequirementsInfoNV( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- GeometryDataNV& operator=( VkGeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureMemoryRequirementsInfoNV & operator=( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryDataNV const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV const *>( &rhs );
return *this;
}
- GeometryDataNV & setTriangles( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV triangles_ ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureMemoryRequirementsInfoNV & operator=( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- triangles = triangles_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( AccelerationStructureMemoryRequirementsInfoNV ) );
+ return *this;
+ }
+
+ AccelerationStructureMemoryRequirementsInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
return *this;
}
- GeometryDataNV & setAabbs( VULKAN_HPP_NAMESPACE::GeometryAABBNV aabbs_ ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureMemoryRequirementsInfoNV & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_ ) VULKAN_HPP_NOEXCEPT
{
- aabbs = aabbs_;
+ type = type_;
return *this;
}
- operator VkGeometryDataNV const&() const VULKAN_HPP_NOEXCEPT
+ AccelerationStructureMemoryRequirementsInfoNV & setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkGeometryDataNV*>( this );
+ accelerationStructure = accelerationStructure_;
+ return *this;
}
- operator VkGeometryDataNV &() VULKAN_HPP_NOEXCEPT
+
+ operator VkAccelerationStructureMemoryRequirementsInfoNV const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkGeometryDataNV*>( this );
+ return *reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV*>( this );
}
- bool operator==( GeometryDataNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureMemoryRequirementsInfoNV &() VULKAN_HPP_NOEXCEPT
{
- return ( triangles == rhs.triangles )
- && ( aabbs == rhs.aabbs );
+ return *reinterpret_cast<VkAccelerationStructureMemoryRequirementsInfoNV*>( this );
}
- bool operator!=( GeometryDataNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( AccelerationStructureMemoryRequirementsInfoNV const& ) const = default;
+#else
+ bool operator==( AccelerationStructureMemoryRequirementsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( type == rhs.type )
+ && ( accelerationStructure == rhs.accelerationStructure );
+ }
+
+ bool operator!=( AccelerationStructureMemoryRequirementsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- VULKAN_HPP_NAMESPACE::GeometryTrianglesNV triangles = {};
- VULKAN_HPP_NAMESPACE::GeometryAABBNV aabbs = {};
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureMemoryRequirementsInfoNV;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type = {};
+ VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure = {};
+
};
- static_assert( sizeof( GeometryDataNV ) == sizeof( VkGeometryDataNV ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<GeometryDataNV>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( AccelerationStructureMemoryRequirementsInfoNV ) == sizeof( VkAccelerationStructureMemoryRequirementsInfoNV ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<AccelerationStructureMemoryRequirementsInfoNV>::value, "struct wrapper is not a standard layout!" );
- struct GeometryNV
+ template <>
+ struct CppType<StructureType, StructureType::eAccelerationStructureMemoryRequirementsInfoNV>
+ {
+ using Type = AccelerationStructureMemoryRequirementsInfoNV;
+ };
+
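  // Illustrative usage sketch (not from the header itself): the NV wrapper above feeds
  // vkGetAccelerationStructureMemoryRequirementsNV. The generated setters return *this,
  // so the struct can be filled fluently; `accel` is a placeholder handle created elsewhere,
  // and sType is fixed by the wrapper. Assumes #include <vulkan/vulkan.hpp>.
  inline void fillMemoryRequirementsInfoNV( vk::AccelerationStructureNV accel )
  {
    vk::AccelerationStructureMemoryRequirementsInfoNV info{};
    info.setType( vk::AccelerationStructureMemoryRequirementsTypeNV::eBuildScratch )
        .setAccelerationStructure( accel );
    // info now names the acceleration structure and the kind of requirement being queried.
  }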
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ struct AccelerationStructureVersionKHR
{
- VULKAN_HPP_CONSTEXPR GeometryNV( VULKAN_HPP_NAMESPACE::GeometryTypeNV geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeNV::eTriangles,
- VULKAN_HPP_NAMESPACE::GeometryDataNV geometry_ = {},
- VULKAN_HPP_NAMESPACE::GeometryFlagsNV flags_ = {} ) VULKAN_HPP_NOEXCEPT
- : geometryType( geometryType_ )
- , geometry( geometry_ )
- , flags( flags_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureVersionKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR AccelerationStructureVersionKHR(const uint8_t* versionData_ = {}) VULKAN_HPP_NOEXCEPT
+ : versionData( versionData_ )
{}
- VULKAN_HPP_NAMESPACE::GeometryNV & operator=( VULKAN_HPP_NAMESPACE::GeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::GeometryNV ) - offsetof( GeometryNV, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR AccelerationStructureVersionKHR( AccelerationStructureVersionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- GeometryNV( VkGeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureVersionKHR( VkAccelerationStructureVersionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- GeometryNV& operator=( VkGeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureVersionKHR & operator=( VkAccelerationStructureVersionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryNV const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureVersionKHR const *>( &rhs );
return *this;
}
- GeometryNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureVersionKHR & operator=( AccelerationStructureVersionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( AccelerationStructureVersionKHR ) );
return *this;
}
- GeometryNV & setGeometryType( VULKAN_HPP_NAMESPACE::GeometryTypeNV geometryType_ ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureVersionKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- geometryType = geometryType_;
+ pNext = pNext_;
return *this;
}
- GeometryNV & setGeometry( VULKAN_HPP_NAMESPACE::GeometryDataNV geometry_ ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureVersionKHR & setVersionData( const uint8_t* versionData_ ) VULKAN_HPP_NOEXCEPT
{
- geometry = geometry_;
+ versionData = versionData_;
return *this;
}
- GeometryNV & setFlags( VULKAN_HPP_NAMESPACE::GeometryFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
- {
- flags = flags_;
- return *this;
- }
- operator VkGeometryNV const&() const VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureVersionKHR const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkGeometryNV*>( this );
+ return *reinterpret_cast<const VkAccelerationStructureVersionKHR*>( this );
}
- operator VkGeometryNV &() VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureVersionKHR &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkGeometryNV*>( this );
+ return *reinterpret_cast<VkAccelerationStructureVersionKHR*>( this );
}
- bool operator==( GeometryNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( AccelerationStructureVersionKHR const& ) const = default;
+#else
+ bool operator==( AccelerationStructureVersionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( geometryType == rhs.geometryType )
- && ( geometry == rhs.geometry )
- && ( flags == rhs.flags );
+ && ( versionData == rhs.versionData );
}
- bool operator!=( GeometryNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( AccelerationStructureVersionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryNV;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureVersionKHR;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::GeometryTypeNV geometryType = VULKAN_HPP_NAMESPACE::GeometryTypeNV::eTriangles;
- VULKAN_HPP_NAMESPACE::GeometryDataNV geometry = {};
- VULKAN_HPP_NAMESPACE::GeometryFlagsNV flags = {};
+ const uint8_t* versionData = {};
+
};
- static_assert( sizeof( GeometryNV ) == sizeof( VkGeometryNV ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<GeometryNV>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( AccelerationStructureVersionKHR ) == sizeof( VkAccelerationStructureVersionKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<AccelerationStructureVersionKHR>::value, "struct wrapper is not a standard layout!" );
- struct AccelerationStructureInfoNV
+ template <>
+ struct CppType<StructureType, StructureType::eAccelerationStructureVersionKHR>
{
- VULKAN_HPP_CONSTEXPR AccelerationStructureInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV::eTopLevel,
- VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_ = {},
- uint32_t instanceCount_ = {},
- uint32_t geometryCount_ = {},
- const VULKAN_HPP_NAMESPACE::GeometryNV* pGeometries_ = {} ) VULKAN_HPP_NOEXCEPT
- : type( type_ )
- , flags( flags_ )
- , instanceCount( instanceCount_ )
- , geometryCount( geometryCount_ )
- , pGeometries( pGeometries_ )
+ using Type = AccelerationStructureVersionKHR;
+ };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+ class SwapchainKHR
+ {
+ public:
+ using CType = VkSwapchainKHR;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSwapchainKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSwapchainKHR;
+
+ public:
+ VULKAN_HPP_CONSTEXPR SwapchainKHR() VULKAN_HPP_NOEXCEPT
+ : m_swapchainKHR(VK_NULL_HANDLE)
{}
- VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV & operator=( VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV ) - offsetof( AccelerationStructureInfoNV, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR SwapchainKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_swapchainKHR(VK_NULL_HANDLE)
+ {}
- AccelerationStructureInfoNV( VkAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
+ VULKAN_HPP_TYPESAFE_EXPLICIT SwapchainKHR( VkSwapchainKHR swapchainKHR ) VULKAN_HPP_NOEXCEPT
+ : m_swapchainKHR( swapchainKHR )
+ {}
- AccelerationStructureInfoNV& operator=( VkAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ SwapchainKHR & operator=(VkSwapchainKHR swapchainKHR) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV const *>(&rhs);
+ m_swapchainKHR = swapchainKHR;
return *this;
}
+#endif
- AccelerationStructureInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ SwapchainKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ m_swapchainKHR = VK_NULL_HANDLE;
return *this;
}
- AccelerationStructureInfoNV & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_ ) VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SwapchainKHR const& ) const = default;
+#else
+ bool operator==( SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- type = type_;
- return *this;
+ return m_swapchainKHR == rhs.m_swapchainKHR;
}
- AccelerationStructureInfoNV & setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+ bool operator!=(SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- flags = flags_;
- return *this;
+ return m_swapchainKHR != rhs.m_swapchainKHR;
}
- AccelerationStructureInfoNV & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT
+ bool operator<(SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- instanceCount = instanceCount_;
- return *this;
+ return m_swapchainKHR < rhs.m_swapchainKHR;
}
+#endif
- AccelerationStructureInfoNV & setGeometryCount( uint32_t geometryCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSwapchainKHR() const VULKAN_HPP_NOEXCEPT
{
- geometryCount = geometryCount_;
- return *this;
+ return m_swapchainKHR;
}
- AccelerationStructureInfoNV & setPGeometries( const VULKAN_HPP_NAMESPACE::GeometryNV* pGeometries_ ) VULKAN_HPP_NOEXCEPT
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
{
- pGeometries = pGeometries_;
- return *this;
+ return m_swapchainKHR != VK_NULL_HANDLE;
}
- operator VkAccelerationStructureInfoNV const&() const VULKAN_HPP_NOEXCEPT
+ bool operator!() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkAccelerationStructureInfoNV*>( this );
+ return m_swapchainKHR == VK_NULL_HANDLE;
}
- operator VkAccelerationStructureInfoNV &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkAccelerationStructureInfoNV*>( this );
- }
+ private:
+ VkSwapchainKHR m_swapchainKHR;
+ };
+ static_assert( sizeof( VULKAN_HPP_NAMESPACE::SwapchainKHR ) == sizeof( VkSwapchainKHR ), "handle and wrapper have different size!" );
- bool operator==( AccelerationStructureInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( type == rhs.type )
- && ( flags == rhs.flags )
- && ( instanceCount == rhs.instanceCount )
- && ( geometryCount == rhs.geometryCount )
- && ( pGeometries == rhs.pGeometries );
- }
+ template <>
+ struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eSwapchainKHR>
+ {
+ using type = VULKAN_HPP_NAMESPACE::SwapchainKHR;
+ };
- bool operator!=( AccelerationStructureInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eSwapchainKHR>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::SwapchainKHR;
+ };
- public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureInfoNV;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV::eTopLevel;
- VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags = {};
- uint32_t instanceCount = {};
- uint32_t geometryCount = {};
- const VULKAN_HPP_NAMESPACE::GeometryNV* pGeometries = {};
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSwapchainKHR>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::SwapchainKHR;
};
- static_assert( sizeof( AccelerationStructureInfoNV ) == sizeof( VkAccelerationStructureInfoNV ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<AccelerationStructureInfoNV>::value, "struct wrapper is not a standard layout!" );
- struct AccelerationStructureCreateInfoNV
+
+ template <>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::SwapchainKHR>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+ };
+
+ class Semaphore
{
- VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoNV( VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ = {},
- VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info_ = {} ) VULKAN_HPP_NOEXCEPT
- : compactedSize( compactedSize_ )
- , info( info_ )
+ public:
+ using CType = VkSemaphore;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSemaphore;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSemaphore;
+
+ public:
+ VULKAN_HPP_CONSTEXPR Semaphore() VULKAN_HPP_NOEXCEPT
+ : m_semaphore(VK_NULL_HANDLE)
{}
- VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV ) - offsetof( AccelerationStructureCreateInfoNV, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR Semaphore( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_semaphore(VK_NULL_HANDLE)
+ {}
- AccelerationStructureCreateInfoNV( VkAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
+ VULKAN_HPP_TYPESAFE_EXPLICIT Semaphore( VkSemaphore semaphore ) VULKAN_HPP_NOEXCEPT
+ : m_semaphore( semaphore )
+ {}
- AccelerationStructureCreateInfoNV& operator=( VkAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ Semaphore & operator=(VkSemaphore semaphore) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const *>(&rhs);
+ m_semaphore = semaphore;
return *this;
}
+#endif
- AccelerationStructureCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ Semaphore & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ m_semaphore = VK_NULL_HANDLE;
return *this;
}
- AccelerationStructureCreateInfoNV & setCompactedSize( VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ ) VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( Semaphore const& ) const = default;
+#else
+ bool operator==( Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- compactedSize = compactedSize_;
- return *this;
+ return m_semaphore == rhs.m_semaphore;
}
- AccelerationStructureCreateInfoNV & setInfo( VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info_ ) VULKAN_HPP_NOEXCEPT
+ bool operator!=(Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- info = info_;
- return *this;
+ return m_semaphore != rhs.m_semaphore;
}
- operator VkAccelerationStructureCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+ bool operator<(Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkAccelerationStructureCreateInfoNV*>( this );
+ return m_semaphore < rhs.m_semaphore;
}
+#endif
- operator VkAccelerationStructureCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSemaphore() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkAccelerationStructureCreateInfoNV*>( this );
+ return m_semaphore;
}
- bool operator==( AccelerationStructureCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
{
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( compactedSize == rhs.compactedSize )
- && ( info == rhs.info );
+ return m_semaphore != VK_NULL_HANDLE;
}
- bool operator!=( AccelerationStructureCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!() const VULKAN_HPP_NOEXCEPT
{
- return !operator==( rhs );
+ return m_semaphore == VK_NULL_HANDLE;
}
- public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureCreateInfoNV;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::DeviceSize compactedSize = {};
- VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info = {};
+ private:
+ VkSemaphore m_semaphore;
};
- static_assert( sizeof( AccelerationStructureCreateInfoNV ) == sizeof( VkAccelerationStructureCreateInfoNV ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<AccelerationStructureCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( VULKAN_HPP_NAMESPACE::Semaphore ) == sizeof( VkSemaphore ), "handle and wrapper have different size!" );
- struct AccelerationStructureMemoryRequirementsInfoNV
+ template <>
+ struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eSemaphore>
+ {
+ using type = VULKAN_HPP_NAMESPACE::Semaphore;
+ };
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eSemaphore>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::Semaphore;
+ };
+
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSemaphore>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::Semaphore;
+ };
+
+
+ template <>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Semaphore>
{
- VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV::eObject,
- VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ = {} ) VULKAN_HPP_NOEXCEPT
- : type( type_ )
- , accelerationStructure( accelerationStructure_ )
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+ };
+
+ class Fence
+ {
+ public:
+ using CType = VkFence;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eFence;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFence;
+
+ public:
+ VULKAN_HPP_CONSTEXPR Fence() VULKAN_HPP_NOEXCEPT
+ : m_fence(VK_NULL_HANDLE)
{}
- VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & operator=( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV ) - offsetof( AccelerationStructureMemoryRequirementsInfoNV, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR Fence( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_fence(VK_NULL_HANDLE)
+ {}
- AccelerationStructureMemoryRequirementsInfoNV( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
+ VULKAN_HPP_TYPESAFE_EXPLICIT Fence( VkFence fence ) VULKAN_HPP_NOEXCEPT
+ : m_fence( fence )
+ {}
- AccelerationStructureMemoryRequirementsInfoNV& operator=( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ Fence & operator=(VkFence fence) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV const *>(&rhs);
+ m_fence = fence;
return *this;
}
+#endif
- AccelerationStructureMemoryRequirementsInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ Fence & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ m_fence = VK_NULL_HANDLE;
return *this;
}
- AccelerationStructureMemoryRequirementsInfoNV & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_ ) VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( Fence const& ) const = default;
+#else
+ bool operator==( Fence const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- type = type_;
- return *this;
+ return m_fence == rhs.m_fence;
}
- AccelerationStructureMemoryRequirementsInfoNV & setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
+ bool operator!=(Fence const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- accelerationStructure = accelerationStructure_;
- return *this;
+ return m_fence != rhs.m_fence;
}
- operator VkAccelerationStructureMemoryRequirementsInfoNV const&() const VULKAN_HPP_NOEXCEPT
+ bool operator<(Fence const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV*>( this );
+ return m_fence < rhs.m_fence;
}
+#endif
- operator VkAccelerationStructureMemoryRequirementsInfoNV &() VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkFence() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkAccelerationStructureMemoryRequirementsInfoNV*>( this );
+ return m_fence;
}
- bool operator==( AccelerationStructureMemoryRequirementsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
{
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( type == rhs.type )
- && ( accelerationStructure == rhs.accelerationStructure );
+ return m_fence != VK_NULL_HANDLE;
}
- bool operator!=( AccelerationStructureMemoryRequirementsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!() const VULKAN_HPP_NOEXCEPT
{
- return !operator==( rhs );
+ return m_fence == VK_NULL_HANDLE;
}
- public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureMemoryRequirementsInfoNV;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type = VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV::eObject;
- VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure = {};
+ private:
+ VkFence m_fence;
+ };
+ static_assert( sizeof( VULKAN_HPP_NAMESPACE::Fence ) == sizeof( VkFence ), "handle and wrapper have different size!" );
+
+ template <>
+ struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eFence>
+ {
+ using type = VULKAN_HPP_NAMESPACE::Fence;
+ };
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eFence>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::Fence;
+ };
+
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFence>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::Fence;
+ };
+
+
+ template <>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Fence>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
};
- static_assert( sizeof( AccelerationStructureMemoryRequirementsInfoNV ) == sizeof( VkAccelerationStructureMemoryRequirementsInfoNV ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<AccelerationStructureMemoryRequirementsInfoNV>::value, "struct wrapper is not a standard layout!" );
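  // Illustrative usage sketch (not from the header itself): SwapchainKHR, Semaphore and
  // Fence above are thin handle wrappers. They default to VK_NULL_HANDLE, convert to bool,
  // and can be reset by assigning nullptr, so null checks read naturally. destroyFence is
  // the generated wrapper for vkDestroyFence; `device` and `fence` are placeholder objects.
  inline void destroyIfValid( vk::Device device, vk::Fence & fence )
  {
    if ( fence )                     // explicit operator bool(): handle is non-null
    {
      device.destroyFence( fence );  // generated member wrapper for vkDestroyFence
      fence = nullptr;               // operator=( std::nullptr_t ) resets to VK_NULL_HANDLE
    }
  }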
struct AcquireNextImageInfoKHR
{
- VULKAN_HPP_CONSTEXPR AcquireNextImageInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {},
- uint64_t timeout_ = {},
- VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
- VULKAN_HPP_NAMESPACE::Fence fence_ = {},
- uint32_t deviceMask_ = {} ) VULKAN_HPP_NOEXCEPT
- : swapchain( swapchain_ )
- , timeout( timeout_ )
- , semaphore( semaphore_ )
- , fence( fence_ )
- , deviceMask( deviceMask_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAcquireNextImageInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR AcquireNextImageInfoKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}, uint64_t timeout_ = {}, VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::Fence fence_ = {}, uint32_t deviceMask_ = {}) VULKAN_HPP_NOEXCEPT
+ : swapchain( swapchain_ ), timeout( timeout_ ), semaphore( semaphore_ ), fence( fence_ ), deviceMask( deviceMask_ )
{}
- VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR & operator=( VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR ) - offsetof( AcquireNextImageInfoKHR, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR AcquireNextImageInfoKHR( AcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AcquireNextImageInfoKHR( VkAcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ AcquireNextImageInfoKHR & operator=( VkAcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR const *>( &rhs );
+ return *this;
+ }
- AcquireNextImageInfoKHR& operator=( VkAcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ AcquireNextImageInfoKHR & operator=( AcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( AcquireNextImageInfoKHR ) );
return *this;
}
@@ -20155,6 +17141,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkAcquireNextImageInfoKHR const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAcquireNextImageInfoKHR*>( this );
@@ -20165,6 +17152,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkAcquireNextImageInfoKHR*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( AcquireNextImageInfoKHR const& ) const = default;
+#else
bool operator==( AcquireNextImageInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -20180,6 +17171,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAcquireNextImageInfoKHR;
@@ -20189,32 +17183,44 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
VULKAN_HPP_NAMESPACE::Fence fence = {};
uint32_t deviceMask = {};
+
};
static_assert( sizeof( AcquireNextImageInfoKHR ) == sizeof( VkAcquireNextImageInfoKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<AcquireNextImageInfoKHR>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eAcquireNextImageInfoKHR>
+ {
+ using Type = AcquireNextImageInfoKHR;
+ };
+
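  // Illustrative usage sketch (not from the header itself): AcquireNextImageInfoKHR is the
  // device-group variant of swapchain image acquisition. The constructor above takes the
  // fields in declaration order; here the fence is left null and deviceMask 1 targets the
  // first device. acquireNextImage2KHR is the generated wrapper for vkAcquireNextImage2KHR.
  inline vk::ResultValue<uint32_t> acquireNext( vk::Device device, vk::SwapchainKHR swapchain,
                                                vk::Semaphore imageAvailable )
  {
    vk::AcquireNextImageInfoKHR acquireInfo( swapchain, UINT64_MAX, imageAvailable,
                                             nullptr /* fence */, 1 /* deviceMask */ );
    return device.acquireNextImage2KHR( acquireInfo );
  }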
struct AcquireProfilingLockInfoKHR
{
- VULKAN_HPP_CONSTEXPR AcquireProfilingLockInfoKHR( VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags_ = {},
- uint64_t timeout_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , timeout( timeout_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAcquireProfilingLockInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR AcquireProfilingLockInfoKHR(VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags_ = {}, uint64_t timeout_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), timeout( timeout_ )
{}
- VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR & operator=( VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR ) - offsetof( AcquireProfilingLockInfoKHR, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR AcquireProfilingLockInfoKHR( AcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AcquireProfilingLockInfoKHR( VkAcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- AcquireProfilingLockInfoKHR& operator=( VkAcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ AcquireProfilingLockInfoKHR & operator=( VkAcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR const *>( &rhs );
+ return *this;
+ }
+
+ AcquireProfilingLockInfoKHR & operator=( AcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( AcquireProfilingLockInfoKHR ) );
return *this;
}
@@ -20236,6 +17242,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkAcquireProfilingLockInfoKHR const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAcquireProfilingLockInfoKHR*>( this );
@@ -20246,6 +17253,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkAcquireProfilingLockInfoKHR*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( AcquireProfilingLockInfoKHR const& ) const = default;
+#else
bool operator==( AcquireProfilingLockInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -20258,40 +17269,52 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAcquireProfilingLockInfoKHR;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags = {};
uint64_t timeout = {};
+
};
static_assert( sizeof( AcquireProfilingLockInfoKHR ) == sizeof( VkAcquireProfilingLockInfoKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<AcquireProfilingLockInfoKHR>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eAcquireProfilingLockInfoKHR>
+ {
+ using Type = AcquireProfilingLockInfoKHR;
+ };
+
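  // Illustrative usage sketch (not from the header itself): this struct parameterises
  // vkAcquireProfilingLockKHR from VK_KHR_performance_query. The constructor takes flags and
  // a timeout in nanoseconds; the lock call itself is omitted here because the extension
  // entry point must be enabled and loaded through a suitable dispatcher first.
  inline vk::AcquireProfilingLockInfoKHR makeProfilingLockInfo()
  {
    return vk::AcquireProfilingLockInfoKHR( vk::AcquireProfilingLockFlagsKHR(),
                                            1000000000 /* wait up to 1 s */ );
  }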
struct AllocationCallbacks
{
- VULKAN_HPP_CONSTEXPR AllocationCallbacks( void* pUserData_ = {},
- PFN_vkAllocationFunction pfnAllocation_ = {},
- PFN_vkReallocationFunction pfnReallocation_ = {},
- PFN_vkFreeFunction pfnFree_ = {},
- PFN_vkInternalAllocationNotification pfnInternalAllocation_ = {},
- PFN_vkInternalFreeNotification pfnInternalFree_ = {} ) VULKAN_HPP_NOEXCEPT
- : pUserData( pUserData_ )
- , pfnAllocation( pfnAllocation_ )
- , pfnReallocation( pfnReallocation_ )
- , pfnFree( pfnFree_ )
- , pfnInternalAllocation( pfnInternalAllocation_ )
- , pfnInternalFree( pfnInternalFree_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR AllocationCallbacks(void* pUserData_ = {}, PFN_vkAllocationFunction pfnAllocation_ = {}, PFN_vkReallocationFunction pfnReallocation_ = {}, PFN_vkFreeFunction pfnFree_ = {}, PFN_vkInternalAllocationNotification pfnInternalAllocation_ = {}, PFN_vkInternalFreeNotification pfnInternalFree_ = {}) VULKAN_HPP_NOEXCEPT
+ : pUserData( pUserData_ ), pfnAllocation( pfnAllocation_ ), pfnReallocation( pfnReallocation_ ), pfnFree( pfnFree_ ), pfnInternalAllocation( pfnInternalAllocation_ ), pfnInternalFree( pfnInternalFree_ )
{}
+ VULKAN_HPP_CONSTEXPR AllocationCallbacks( AllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
AllocationCallbacks( VkAllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ AllocationCallbacks & operator=( VkAllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AllocationCallbacks const *>( &rhs );
+ return *this;
+ }
- AllocationCallbacks& operator=( VkAllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT
+ AllocationCallbacks & operator=( AllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AllocationCallbacks const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( AllocationCallbacks ) );
return *this;
}
@@ -20331,6 +17354,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkAllocationCallbacks const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAllocationCallbacks*>( this );
@@ -20341,6 +17365,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkAllocationCallbacks*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( AllocationCallbacks const& ) const = default;
+#else
bool operator==( AllocationCallbacks const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( pUserData == rhs.pUserData )
@@ -20355,6 +17383,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
void* pUserData = {};
@@ -20363,30 +17394,37 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkFreeFunction pfnFree = {};
PFN_vkInternalAllocationNotification pfnInternalAllocation = {};
PFN_vkInternalFreeNotification pfnInternalFree = {};
+
};
static_assert( sizeof( AllocationCallbacks ) == sizeof( VkAllocationCallbacks ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<AllocationCallbacks>::value, "struct wrapper is not a standard layout!" );
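  // Illustrative usage sketch (not from the header itself): AllocationCallbacks has no sType
  // and simply bundles the C callback pointers in the constructor order shown above
  // (pUserData, allocate, reallocate, free, then the optional internal notifications).
  // myAlloc / myRealloc / myFree and pTracker are hypothetical names; a real allocator must
  // honour the requested alignment. Assumes #include <vulkan/vulkan.hpp> and <cstdlib>.
  static VKAPI_ATTR void * VKAPI_CALL myAlloc( void *, size_t size, size_t /*alignment*/, VkSystemAllocationScope )
  {
    return std::malloc( size );  // placeholder: ignores the requested alignment
  }
  static VKAPI_ATTR void * VKAPI_CALL myRealloc( void *, void * pOriginal, size_t size, size_t, VkSystemAllocationScope )
  {
    return std::realloc( pOriginal, size );
  }
  static VKAPI_ATTR void VKAPI_CALL myFree( void *, void * pMemory )
  {
    std::free( pMemory );
  }
  inline vk::AllocationCallbacks makeCallbacks( void * pTracker )
  {
    return vk::AllocationCallbacks( pTracker, &myAlloc, &myRealloc, &myFree );
  }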
struct ComponentMapping
{
- VULKAN_HPP_CONSTEXPR ComponentMapping( VULKAN_HPP_NAMESPACE::ComponentSwizzle r_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity,
- VULKAN_HPP_NAMESPACE::ComponentSwizzle g_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity,
- VULKAN_HPP_NAMESPACE::ComponentSwizzle b_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity,
- VULKAN_HPP_NAMESPACE::ComponentSwizzle a_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity ) VULKAN_HPP_NOEXCEPT
- : r( r_ )
- , g( g_ )
- , b( b_ )
- , a( a_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ComponentMapping(VULKAN_HPP_NAMESPACE::ComponentSwizzle r_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, VULKAN_HPP_NAMESPACE::ComponentSwizzle g_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, VULKAN_HPP_NAMESPACE::ComponentSwizzle b_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, VULKAN_HPP_NAMESPACE::ComponentSwizzle a_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity) VULKAN_HPP_NOEXCEPT
+ : r( r_ ), g( g_ ), b( b_ ), a( a_ )
{}
+ VULKAN_HPP_CONSTEXPR ComponentMapping( ComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
ComponentMapping( VkComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ComponentMapping& operator=( VkComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT
+ ComponentMapping & operator=( VkComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ComponentMapping const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ComponentMapping const *>( &rhs );
+ return *this;
+ }
+
+ ComponentMapping & operator=( ComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ComponentMapping ) );
return *this;
}
@@ -20414,6 +17452,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkComponentMapping const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkComponentMapping*>( this );
@@ -20424,6 +17463,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkComponentMapping*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ComponentMapping const& ) const = default;
+#else
bool operator==( ComponentMapping const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( r == rhs.r )
@@ -20436,55 +17479,52 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
VULKAN_HPP_NAMESPACE::ComponentSwizzle r = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity;
VULKAN_HPP_NAMESPACE::ComponentSwizzle g = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity;
VULKAN_HPP_NAMESPACE::ComponentSwizzle b = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity;
VULKAN_HPP_NAMESPACE::ComponentSwizzle a = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity;
+
};
static_assert( sizeof( ComponentMapping ) == sizeof( VkComponentMapping ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ComponentMapping>::value, "struct wrapper is not a standard layout!" );
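  // Illustrative usage sketch (not from the header itself): ComponentMapping defaults every
  // swizzle to eIdentity, so a default-constructed value is the usual "no swizzle" mapping
  // for image views, while a non-identity mapping is spelled out per component.
  inline vk::ComponentMapping identityMapping()
  {
    return vk::ComponentMapping{};  // r, g, b, a all ComponentSwizzle::eIdentity
  }
  inline vk::ComponentMapping bgraMapping()
  {
    return vk::ComponentMapping( vk::ComponentSwizzle::eB, vk::ComponentSwizzle::eG,
                                 vk::ComponentSwizzle::eR, vk::ComponentSwizzle::eA );
  }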
#ifdef VK_USE_PLATFORM_ANDROID_KHR
-
struct AndroidHardwareBufferFormatPropertiesANDROID
{
- AndroidHardwareBufferFormatPropertiesANDROID( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
- uint64_t externalFormat_ = {},
- VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures_ = {},
- VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents_ = {},
- VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity,
- VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull,
- VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven,
- VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven ) VULKAN_HPP_NOEXCEPT
- : format( format_ )
- , externalFormat( externalFormat_ )
- , formatFeatures( formatFeatures_ )
- , samplerYcbcrConversionComponents( samplerYcbcrConversionComponents_ )
- , suggestedYcbcrModel( suggestedYcbcrModel_ )
- , suggestedYcbcrRange( suggestedYcbcrRange_ )
- , suggestedXChromaOffset( suggestedXChromaOffset_ )
- , suggestedYChromaOffset( suggestedYChromaOffset_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferFormatPropertiesANDROID;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatPropertiesANDROID(VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, uint64_t externalFormat_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures_ = {}, VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents_ = {}, VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity, VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull, VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven) VULKAN_HPP_NOEXCEPT
+ : format( format_ ), externalFormat( externalFormat_ ), formatFeatures( formatFeatures_ ), samplerYcbcrConversionComponents( samplerYcbcrConversionComponents_ ), suggestedYcbcrModel( suggestedYcbcrModel_ ), suggestedYcbcrRange( suggestedYcbcrRange_ ), suggestedXChromaOffset( suggestedXChromaOffset_ ), suggestedYChromaOffset( suggestedYChromaOffset_ )
{}
- VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID & operator=( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID ) - offsetof( AndroidHardwareBufferFormatPropertiesANDROID, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatPropertiesANDROID( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AndroidHardwareBufferFormatPropertiesANDROID( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- AndroidHardwareBufferFormatPropertiesANDROID& operator=( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+ AndroidHardwareBufferFormatPropertiesANDROID & operator=( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID const *>( &rhs );
return *this;
}
+ AndroidHardwareBufferFormatPropertiesANDROID & operator=( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( AndroidHardwareBufferFormatPropertiesANDROID ) );
+ return *this;
+ }
+
+
operator VkAndroidHardwareBufferFormatPropertiesANDROID const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAndroidHardwareBufferFormatPropertiesANDROID*>( this );
@@ -20495,6 +17535,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkAndroidHardwareBufferFormatPropertiesANDROID*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( AndroidHardwareBufferFormatPropertiesANDROID const& ) const = default;
+#else
bool operator==( AndroidHardwareBufferFormatPropertiesANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -20513,6 +17557,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferFormatPropertiesANDROID;
@@ -20525,38 +17572,50 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull;
VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
+
};
static_assert( sizeof( AndroidHardwareBufferFormatPropertiesANDROID ) == sizeof( VkAndroidHardwareBufferFormatPropertiesANDROID ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<AndroidHardwareBufferFormatPropertiesANDROID>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eAndroidHardwareBufferFormatPropertiesANDROID>
+ {
+ using Type = AndroidHardwareBufferFormatPropertiesANDROID;
+ };
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
#ifdef VK_USE_PLATFORM_ANDROID_KHR
-
struct AndroidHardwareBufferPropertiesANDROID
{
- AndroidHardwareBufferPropertiesANDROID( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {},
- uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT
- : allocationSize( allocationSize_ )
- , memoryTypeBits( memoryTypeBits_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferPropertiesANDROID;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR AndroidHardwareBufferPropertiesANDROID(VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {}, uint32_t memoryTypeBits_ = {}) VULKAN_HPP_NOEXCEPT
+ : allocationSize( allocationSize_ ), memoryTypeBits( memoryTypeBits_ )
{}
- VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID & operator=( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID ) - offsetof( AndroidHardwareBufferPropertiesANDROID, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR AndroidHardwareBufferPropertiesANDROID( AndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AndroidHardwareBufferPropertiesANDROID( VkAndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ AndroidHardwareBufferPropertiesANDROID & operator=( VkAndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID const *>( &rhs );
+ return *this;
+ }
- AndroidHardwareBufferPropertiesANDROID& operator=( VkAndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+ AndroidHardwareBufferPropertiesANDROID & operator=( AndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( AndroidHardwareBufferPropertiesANDROID ) );
return *this;
}
+
operator VkAndroidHardwareBufferPropertiesANDROID const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAndroidHardwareBufferPropertiesANDROID*>( this );
@@ -20567,6 +17626,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( AndroidHardwareBufferPropertiesANDROID const& ) const = default;
+#else
bool operator==( AndroidHardwareBufferPropertiesANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -20579,42 +17642,59 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferPropertiesANDROID;
void* pNext = {};
VULKAN_HPP_NAMESPACE::DeviceSize allocationSize = {};
uint32_t memoryTypeBits = {};
+
};
static_assert( sizeof( AndroidHardwareBufferPropertiesANDROID ) == sizeof( VkAndroidHardwareBufferPropertiesANDROID ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<AndroidHardwareBufferPropertiesANDROID>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eAndroidHardwareBufferPropertiesANDROID>
+ {
+ using Type = AndroidHardwareBufferPropertiesANDROID;
+ };
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
#ifdef VK_USE_PLATFORM_ANDROID_KHR
-
struct AndroidHardwareBufferUsageANDROID
{
- AndroidHardwareBufferUsageANDROID( uint64_t androidHardwareBufferUsage_ = {} ) VULKAN_HPP_NOEXCEPT
- : androidHardwareBufferUsage( androidHardwareBufferUsage_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferUsageANDROID;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR AndroidHardwareBufferUsageANDROID(uint64_t androidHardwareBufferUsage_ = {}) VULKAN_HPP_NOEXCEPT
+ : androidHardwareBufferUsage( androidHardwareBufferUsage_ )
{}
- VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID & operator=( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID ) - offsetof( AndroidHardwareBufferUsageANDROID, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR AndroidHardwareBufferUsageANDROID( AndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AndroidHardwareBufferUsageANDROID( VkAndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- AndroidHardwareBufferUsageANDROID& operator=( VkAndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+ AndroidHardwareBufferUsageANDROID & operator=( VkAndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID const *>( &rhs );
return *this;
}
+ AndroidHardwareBufferUsageANDROID & operator=( AndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( AndroidHardwareBufferUsageANDROID ) );
+ return *this;
+ }
+
+
operator VkAndroidHardwareBufferUsageANDROID const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAndroidHardwareBufferUsageANDROID*>( this );
@@ -20625,6 +17705,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkAndroidHardwareBufferUsageANDROID*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( AndroidHardwareBufferUsageANDROID const& ) const = default;
+#else
bool operator==( AndroidHardwareBufferUsageANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -20636,40 +17720,54 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferUsageANDROID;
void* pNext = {};
uint64_t androidHardwareBufferUsage = {};
+
};
static_assert( sizeof( AndroidHardwareBufferUsageANDROID ) == sizeof( VkAndroidHardwareBufferUsageANDROID ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<AndroidHardwareBufferUsageANDROID>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eAndroidHardwareBufferUsageANDROID>
+ {
+ using Type = AndroidHardwareBufferUsageANDROID;
+ };
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
#ifdef VK_USE_PLATFORM_ANDROID_KHR
-
struct AndroidSurfaceCreateInfoKHR
{
- VULKAN_HPP_CONSTEXPR AndroidSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags_ = {},
- struct ANativeWindow* window_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , window( window_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidSurfaceCreateInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR AndroidSurfaceCreateInfoKHR(VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags_ = {}, struct ANativeWindow* window_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), window( window_ )
{}
- VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & operator=( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR ) - offsetof( AndroidSurfaceCreateInfoKHR, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR AndroidSurfaceCreateInfoKHR( AndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AndroidSurfaceCreateInfoKHR( VkAndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ AndroidSurfaceCreateInfoKHR & operator=( VkAndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const *>( &rhs );
+ return *this;
+ }
- AndroidSurfaceCreateInfoKHR& operator=( VkAndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ AndroidSurfaceCreateInfoKHR & operator=( AndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( AndroidSurfaceCreateInfoKHR ) );
return *this;
}
@@ -20691,6 +17789,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkAndroidSurfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR*>( this );
@@ -20701,6 +17800,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkAndroidSurfaceCreateInfoKHR*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( AndroidSurfaceCreateInfoKHR const& ) const = default;
+#else
bool operator==( AndroidSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -20713,45 +17816,54 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidSurfaceCreateInfoKHR;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags = {};
struct ANativeWindow* window = {};
+
};
static_assert( sizeof( AndroidSurfaceCreateInfoKHR ) == sizeof( VkAndroidSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<AndroidSurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eAndroidSurfaceCreateInfoKHR>
+ {
+ using Type = AndroidSurfaceCreateInfoKHR;
+ };
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
struct ApplicationInfo
{
- VULKAN_HPP_CONSTEXPR ApplicationInfo( const char* pApplicationName_ = {},
- uint32_t applicationVersion_ = {},
- const char* pEngineName_ = {},
- uint32_t engineVersion_ = {},
- uint32_t apiVersion_ = {} ) VULKAN_HPP_NOEXCEPT
- : pApplicationName( pApplicationName_ )
- , applicationVersion( applicationVersion_ )
- , pEngineName( pEngineName_ )
- , engineVersion( engineVersion_ )
- , apiVersion( apiVersion_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eApplicationInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ApplicationInfo(const char* pApplicationName_ = {}, uint32_t applicationVersion_ = {}, const char* pEngineName_ = {}, uint32_t engineVersion_ = {}, uint32_t apiVersion_ = {}) VULKAN_HPP_NOEXCEPT
+ : pApplicationName( pApplicationName_ ), applicationVersion( applicationVersion_ ), pEngineName( pEngineName_ ), engineVersion( engineVersion_ ), apiVersion( apiVersion_ )
{}
- VULKAN_HPP_NAMESPACE::ApplicationInfo & operator=( VULKAN_HPP_NAMESPACE::ApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ApplicationInfo ) - offsetof( ApplicationInfo, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR ApplicationInfo( ApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ApplicationInfo( VkApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ApplicationInfo& operator=( VkApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ ApplicationInfo & operator=( VkApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ApplicationInfo const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ApplicationInfo const *>( &rhs );
+ return *this;
+ }
+
+ ApplicationInfo & operator=( ApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ApplicationInfo ) );
return *this;
}
@@ -20791,6 +17903,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkApplicationInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkApplicationInfo*>( this );
@@ -20801,6 +17914,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkApplicationInfo*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ApplicationInfo const& ) const = default;
+#else
bool operator==( ApplicationInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -20816,6 +17933,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eApplicationInfo;
@@ -20825,40 +17945,43 @@ namespace VULKAN_HPP_NAMESPACE
const char* pEngineName = {};
uint32_t engineVersion = {};
uint32_t apiVersion = {};
+
};
static_assert( sizeof( ApplicationInfo ) == sizeof( VkApplicationInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ApplicationInfo>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eApplicationInfo>
+ {
+ using Type = ApplicationInfo;
+ };
+
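
A minimal sketch of how these two wrappers are typically combined to create an instance, assuming the default `vk` namespace, exceptions enabled, and illustrative application/engine names:

    vk::ApplicationInfo appInfo( "MyApp",                       // pApplicationName (illustrative)
                                 VK_MAKE_VERSION( 1, 0, 0 ),    // applicationVersion
                                 "MyEngine",                    // pEngineName (illustrative)
                                 VK_MAKE_VERSION( 1, 0, 0 ),    // engineVersion
                                 VK_API_VERSION_1_2 );          // apiVersion
    vk::InstanceCreateInfo createInfo( {}, &appInfo );          // no layers or extensions enabled
    vk::Instance instance = vk::createInstance( createInfo );   // throws vk::SystemError on failure
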
struct AttachmentDescription
{
- VULKAN_HPP_CONSTEXPR AttachmentDescription( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ = {},
- VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
- VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad,
- VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore,
- VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad,
- VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore,
- VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
- VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , format( format_ )
- , samples( samples_ )
- , loadOp( loadOp_ )
- , storeOp( storeOp_ )
- , stencilLoadOp( stencilLoadOp_ )
- , stencilStoreOp( stencilStoreOp_ )
- , initialLayout( initialLayout_ )
- , finalLayout( finalLayout_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR AttachmentDescription(VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), format( format_ ), samples( samples_ ), loadOp( loadOp_ ), storeOp( storeOp_ ), stencilLoadOp( stencilLoadOp_ ), stencilStoreOp( stencilStoreOp_ ), initialLayout( initialLayout_ ), finalLayout( finalLayout_ )
{}
+ VULKAN_HPP_CONSTEXPR AttachmentDescription( AttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
AttachmentDescription( VkAttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ AttachmentDescription & operator=( VkAttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentDescription const *>( &rhs );
+ return *this;
+ }
- AttachmentDescription& operator=( VkAttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+ AttachmentDescription & operator=( AttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentDescription const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( AttachmentDescription ) );
return *this;
}
@@ -20916,6 +18039,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkAttachmentDescription const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAttachmentDescription*>( this );
@@ -20926,6 +18050,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkAttachmentDescription*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( AttachmentDescription const& ) const = default;
+#else
bool operator==( AttachmentDescription const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( flags == rhs.flags )
@@ -20943,6 +18071,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags = {};
@@ -20954,46 +18085,38 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore;
VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
VULKAN_HPP_NAMESPACE::ImageLayout finalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+
};
static_assert( sizeof( AttachmentDescription ) == sizeof( VkAttachmentDescription ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<AttachmentDescription>::value, "struct wrapper is not a standard layout!" );
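
As a quick illustration of the constructor above, a single-sample color attachment that is cleared on load and handed off for presentation might look like this (format and layouts are illustrative, not prescribed by the header):

    vk::AttachmentDescription colorAttachment( {},                                 // flags
                                               vk::Format::eB8G8R8A8Unorm,
                                               vk::SampleCountFlagBits::e1,
                                               vk::AttachmentLoadOp::eClear,       // loadOp
                                               vk::AttachmentStoreOp::eStore,      // storeOp
                                               vk::AttachmentLoadOp::eDontCare,    // stencilLoadOp
                                               vk::AttachmentStoreOp::eDontCare,   // stencilStoreOp
                                               vk::ImageLayout::eUndefined,        // initialLayout
                                               vk::ImageLayout::ePresentSrcKHR );  // finalLayout
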
struct AttachmentDescription2
{
- VULKAN_HPP_CONSTEXPR AttachmentDescription2( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ = {},
- VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
- VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad,
- VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore,
- VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad,
- VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore,
- VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
- VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , format( format_ )
- , samples( samples_ )
- , loadOp( loadOp_ )
- , storeOp( storeOp_ )
- , stencilLoadOp( stencilLoadOp_ )
- , stencilStoreOp( stencilStoreOp_ )
- , initialLayout( initialLayout_ )
- , finalLayout( finalLayout_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentDescription2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR AttachmentDescription2(VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), format( format_ ), samples( samples_ ), loadOp( loadOp_ ), storeOp( storeOp_ ), stencilLoadOp( stencilLoadOp_ ), stencilStoreOp( stencilStoreOp_ ), initialLayout( initialLayout_ ), finalLayout( finalLayout_ )
{}
- VULKAN_HPP_NAMESPACE::AttachmentDescription2 & operator=( VULKAN_HPP_NAMESPACE::AttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AttachmentDescription2 ) - offsetof( AttachmentDescription2, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR AttachmentDescription2( AttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AttachmentDescription2( VkAttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ AttachmentDescription2 & operator=( VkAttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentDescription2 const *>( &rhs );
+ return *this;
+ }
- AttachmentDescription2& operator=( VkAttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ AttachmentDescription2 & operator=( AttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentDescription2 const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( AttachmentDescription2 ) );
return *this;
}
@@ -21057,6 +18180,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkAttachmentDescription2 const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAttachmentDescription2*>( this );
@@ -21067,6 +18191,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkAttachmentDescription2*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( AttachmentDescription2 const& ) const = default;
+#else
bool operator==( AttachmentDescription2 const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -21086,6 +18214,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentDescription2;
@@ -21099,32 +18230,45 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore;
VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
VULKAN_HPP_NAMESPACE::ImageLayout finalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+
};
static_assert( sizeof( AttachmentDescription2 ) == sizeof( VkAttachmentDescription2 ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<AttachmentDescription2>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eAttachmentDescription2>
+ {
+ using Type = AttachmentDescription2;
+ };
+ using AttachmentDescription2KHR = AttachmentDescription2;
+
struct AttachmentDescriptionStencilLayout
{
- VULKAN_HPP_CONSTEXPR AttachmentDescriptionStencilLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
- VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
- : stencilInitialLayout( stencilInitialLayout_ )
- , stencilFinalLayout( stencilFinalLayout_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentDescriptionStencilLayout;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR AttachmentDescriptionStencilLayout(VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined) VULKAN_HPP_NOEXCEPT
+ : stencilInitialLayout( stencilInitialLayout_ ), stencilFinalLayout( stencilFinalLayout_ )
{}
- VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout & operator=( VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout ) - offsetof( AttachmentDescriptionStencilLayout, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR AttachmentDescriptionStencilLayout( AttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AttachmentDescriptionStencilLayout( VkAttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- AttachmentDescriptionStencilLayout& operator=( VkAttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
+ AttachmentDescriptionStencilLayout & operator=( VkAttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout const *>( &rhs );
+ return *this;
+ }
+
+ AttachmentDescriptionStencilLayout & operator=( AttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( AttachmentDescriptionStencilLayout ) );
return *this;
}
@@ -21146,6 +18290,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkAttachmentDescriptionStencilLayout const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAttachmentDescriptionStencilLayout*>( this );
@@ -21156,6 +18301,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkAttachmentDescriptionStencilLayout*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( AttachmentDescriptionStencilLayout const& ) const = default;
+#else
bool operator==( AttachmentDescriptionStencilLayout const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -21168,32 +18317,53 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentDescriptionStencilLayout;
void* pNext = {};
VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+
};
static_assert( sizeof( AttachmentDescriptionStencilLayout ) == sizeof( VkAttachmentDescriptionStencilLayout ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<AttachmentDescriptionStencilLayout>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eAttachmentDescriptionStencilLayout>
+ {
+ using Type = AttachmentDescriptionStencilLayout;
+ };
+ using AttachmentDescriptionStencilLayoutKHR = AttachmentDescriptionStencilLayout;
+
struct AttachmentReference
{
- VULKAN_HPP_CONSTEXPR AttachmentReference( uint32_t attachment_ = {},
- VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
- : attachment( attachment_ )
- , layout( layout_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR AttachmentReference(uint32_t attachment_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined) VULKAN_HPP_NOEXCEPT
+ : attachment( attachment_ ), layout( layout_ )
{}
+ VULKAN_HPP_CONSTEXPR AttachmentReference( AttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
AttachmentReference( VkAttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- AttachmentReference& operator=( VkAttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT
+ AttachmentReference & operator=( VkAttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentReference const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentReference const *>( &rhs );
+ return *this;
+ }
+
+ AttachmentReference & operator=( AttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( AttachmentReference ) );
return *this;
}
@@ -21209,6 +18379,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkAttachmentReference const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAttachmentReference*>( this );
@@ -21219,6 +18390,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkAttachmentReference*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( AttachmentReference const& ) const = default;
+#else
bool operator==( AttachmentReference const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( attachment == rhs.attachment )
@@ -21229,38 +18404,45 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
uint32_t attachment = {};
VULKAN_HPP_NAMESPACE::ImageLayout layout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+
};
static_assert( sizeof( AttachmentReference ) == sizeof( VkAttachmentReference ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<AttachmentReference>::value, "struct wrapper is not a standard layout!" );
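
AttachmentReference is the non-extensible variant used by the original render-pass API; a sketch of wiring one color reference into a subpass (vk::SubpassDescription is defined elsewhere in this header, and the attachment index 0 is illustrative):

    vk::AttachmentReference colorRef( 0, vk::ImageLayout::eColorAttachmentOptimal );
    vk::SubpassDescription subpass( {}, vk::PipelineBindPoint::eGraphics,
                                    0, nullptr,        // input attachments
                                    1, &colorRef );    // color attachments
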
struct AttachmentReference2
{
- VULKAN_HPP_CONSTEXPR AttachmentReference2( uint32_t attachment_ = {},
- VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
- VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {} ) VULKAN_HPP_NOEXCEPT
- : attachment( attachment_ )
- , layout( layout_ )
- , aspectMask( aspectMask_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentReference2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR AttachmentReference2(uint32_t attachment_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}) VULKAN_HPP_NOEXCEPT
+ : attachment( attachment_ ), layout( layout_ ), aspectMask( aspectMask_ )
{}
- VULKAN_HPP_NAMESPACE::AttachmentReference2 & operator=( VULKAN_HPP_NAMESPACE::AttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AttachmentReference2 ) - offsetof( AttachmentReference2, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR AttachmentReference2( AttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AttachmentReference2( VkAttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ AttachmentReference2 & operator=( VkAttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentReference2 const *>( &rhs );
+ return *this;
+ }
- AttachmentReference2& operator=( VkAttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ AttachmentReference2 & operator=( AttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentReference2 const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( AttachmentReference2 ) );
return *this;
}
@@ -21288,6 +18470,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkAttachmentReference2 const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAttachmentReference2*>( this );
@@ -21298,6 +18481,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkAttachmentReference2*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( AttachmentReference2 const& ) const = default;
+#else
bool operator==( AttachmentReference2 const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -21311,6 +18498,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentReference2;
@@ -21318,30 +18508,45 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t attachment = {};
VULKAN_HPP_NAMESPACE::ImageLayout layout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
+
};
static_assert( sizeof( AttachmentReference2 ) == sizeof( VkAttachmentReference2 ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<AttachmentReference2>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eAttachmentReference2>
+ {
+ using Type = AttachmentReference2;
+ };
+ using AttachmentReference2KHR = AttachmentReference2;
+
struct AttachmentReferenceStencilLayout
{
- VULKAN_HPP_CONSTEXPR AttachmentReferenceStencilLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
- : stencilLayout( stencilLayout_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentReferenceStencilLayout;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR AttachmentReferenceStencilLayout(VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined) VULKAN_HPP_NOEXCEPT
+ : stencilLayout( stencilLayout_ )
{}
- VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout & operator=( VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout ) - offsetof( AttachmentReferenceStencilLayout, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR AttachmentReferenceStencilLayout( AttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AttachmentReferenceStencilLayout( VkAttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- AttachmentReferenceStencilLayout& operator=( VkAttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
+ AttachmentReferenceStencilLayout & operator=( VkAttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout const *>( &rhs );
+ return *this;
+ }
+
+ AttachmentReferenceStencilLayout & operator=( AttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( AttachmentReferenceStencilLayout ) );
return *this;
}
@@ -21357,6 +18562,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkAttachmentReferenceStencilLayout const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAttachmentReferenceStencilLayout*>( this );
@@ -21367,6 +18573,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkAttachmentReferenceStencilLayout*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( AttachmentReferenceStencilLayout const& ) const = default;
+#else
bool operator==( AttachmentReferenceStencilLayout const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -21378,31 +18588,52 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentReferenceStencilLayout;
void* pNext = {};
VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+
};
static_assert( sizeof( AttachmentReferenceStencilLayout ) == sizeof( VkAttachmentReferenceStencilLayout ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<AttachmentReferenceStencilLayout>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eAttachmentReferenceStencilLayout>
+ {
+ using Type = AttachmentReferenceStencilLayout;
+ };
+ using AttachmentReferenceStencilLayoutKHR = AttachmentReferenceStencilLayout;
+
struct Extent2D
{
- VULKAN_HPP_CONSTEXPR Extent2D( uint32_t width_ = {},
- uint32_t height_ = {} ) VULKAN_HPP_NOEXCEPT
- : width( width_ )
- , height( height_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR Extent2D(uint32_t width_ = {}, uint32_t height_ = {}) VULKAN_HPP_NOEXCEPT
+ : width( width_ ), height( height_ )
{}
+ VULKAN_HPP_CONSTEXPR Extent2D( Extent2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
Extent2D( VkExtent2D const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- Extent2D& operator=( VkExtent2D const & rhs ) VULKAN_HPP_NOEXCEPT
+ Extent2D & operator=( VkExtent2D const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Extent2D const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Extent2D const *>( &rhs );
+ return *this;
+ }
+
+ Extent2D & operator=( Extent2D const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( Extent2D ) );
return *this;
}
@@ -21418,6 +18649,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkExtent2D const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExtent2D*>( this );
@@ -21428,6 +18660,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkExtent2D*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( Extent2D const& ) const = default;
+#else
bool operator==( Extent2D const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( width == rhs.width )
@@ -21438,30 +18674,44 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
uint32_t width = {};
uint32_t height = {};
+
};
static_assert( sizeof( Extent2D ) == sizeof( VkExtent2D ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<Extent2D>::value, "struct wrapper is not a standard layout!" );
struct SampleLocationEXT
{
- VULKAN_HPP_CONSTEXPR SampleLocationEXT( float x_ = {},
- float y_ = {} ) VULKAN_HPP_NOEXCEPT
- : x( x_ )
- , y( y_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SampleLocationEXT(float x_ = {}, float y_ = {}) VULKAN_HPP_NOEXCEPT
+ : x( x_ ), y( y_ )
{}
+ VULKAN_HPP_CONSTEXPR SampleLocationEXT( SampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
SampleLocationEXT( VkSampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ SampleLocationEXT & operator=( VkSampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SampleLocationEXT const *>( &rhs );
+ return *this;
+ }
- SampleLocationEXT& operator=( VkSampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ SampleLocationEXT & operator=( SampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SampleLocationEXT const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( SampleLocationEXT ) );
return *this;
}
@@ -21477,6 +18727,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkSampleLocationEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSampleLocationEXT*>( this );
@@ -21487,6 +18738,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkSampleLocationEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SampleLocationEXT const& ) const = default;
+#else
bool operator==( SampleLocationEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( x == rhs.x )
@@ -21497,40 +18752,51 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
float x = {};
float y = {};
+
};
static_assert( sizeof( SampleLocationEXT ) == sizeof( VkSampleLocationEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SampleLocationEXT>::value, "struct wrapper is not a standard layout!" );
struct SampleLocationsInfoEXT
{
- VULKAN_HPP_CONSTEXPR SampleLocationsInfoEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
- VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize_ = {},
- uint32_t sampleLocationsCount_ = {},
- const VULKAN_HPP_NAMESPACE::SampleLocationEXT* pSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT
- : sampleLocationsPerPixel( sampleLocationsPerPixel_ )
- , sampleLocationGridSize( sampleLocationGridSize_ )
- , sampleLocationsCount( sampleLocationsCount_ )
- , pSampleLocations( pSampleLocations_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSampleLocationsInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SampleLocationsInfoEXT(VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize_ = {}, uint32_t sampleLocationsCount_ = {}, const VULKAN_HPP_NAMESPACE::SampleLocationEXT* pSampleLocations_ = {}) VULKAN_HPP_NOEXCEPT
+ : sampleLocationsPerPixel( sampleLocationsPerPixel_ ), sampleLocationGridSize( sampleLocationGridSize_ ), sampleLocationsCount( sampleLocationsCount_ ), pSampleLocations( pSampleLocations_ )
{}
- VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & operator=( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT ) - offsetof( SampleLocationsInfoEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR SampleLocationsInfoEXT( SampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SampleLocationsInfoEXT( VkSampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- SampleLocationsInfoEXT& operator=( VkSampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ SampleLocationsInfoEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_, VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SampleLocationEXT> const & sampleLocations_ )
+ : sampleLocationsPerPixel( sampleLocationsPerPixel_ ), sampleLocationGridSize( sampleLocationGridSize_ ), sampleLocationsCount( static_cast<uint32_t>( sampleLocations_.size() ) ), pSampleLocations( sampleLocations_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ SampleLocationsInfoEXT & operator=( VkSampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+ SampleLocationsInfoEXT & operator=( SampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( SampleLocationsInfoEXT ) );
return *this;
}
@@ -21546,7 +18812,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- SampleLocationsInfoEXT & setSampleLocationGridSize( VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize_ ) VULKAN_HPP_NOEXCEPT
+ SampleLocationsInfoEXT & setSampleLocationGridSize( VULKAN_HPP_NAMESPACE::Extent2D const & sampleLocationGridSize_ ) VULKAN_HPP_NOEXCEPT
{
sampleLocationGridSize = sampleLocationGridSize_;
return *this;
@@ -21564,6 +18830,16 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ SampleLocationsInfoEXT & setSampleLocations( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SampleLocationEXT> const & sampleLocations_ ) VULKAN_HPP_NOEXCEPT
+ {
+ sampleLocationsCount = static_cast<uint32_t>( sampleLocations_.size() );
+ pSampleLocations = sampleLocations_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
operator VkSampleLocationsInfoEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSampleLocationsInfoEXT*>( this );
@@ -21574,6 +18850,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkSampleLocationsInfoEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SampleLocationsInfoEXT const& ) const = default;
+#else
bool operator==( SampleLocationsInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -21588,6 +18868,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSampleLocationsInfoEXT;
@@ -21596,26 +18879,43 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize = {};
uint32_t sampleLocationsCount = {};
const VULKAN_HPP_NAMESPACE::SampleLocationEXT* pSampleLocations = {};
+
};
static_assert( sizeof( SampleLocationsInfoEXT ) == sizeof( VkSampleLocationsInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SampleLocationsInfoEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eSampleLocationsInfoEXT>
+ {
+ using Type = SampleLocationsInfoEXT;
+ };
+
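
The new ArrayProxyNoTemporaries constructor and setter keep sampleLocationsCount and pSampleLocations in sync whenever VULKAN_HPP_DISABLE_ENHANCED_MODE is not defined; a sketch with illustrative values:

    std::vector<vk::SampleLocationEXT> locations = { vk::SampleLocationEXT( 0.25f, 0.25f ),
                                                     vk::SampleLocationEXT( 0.75f, 0.75f ) };
    vk::SampleLocationsInfoEXT info;
    info.setSampleLocationsPerPixel( vk::SampleCountFlagBits::e2 )
        .setSampleLocationGridSize( vk::Extent2D( 1, 1 ) )
        .setSampleLocations( locations );   // fills both the count and the pointer
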
struct AttachmentSampleLocationsEXT
{
- VULKAN_HPP_CONSTEXPR AttachmentSampleLocationsEXT( uint32_t attachmentIndex_ = {},
- VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {} ) VULKAN_HPP_NOEXCEPT
- : attachmentIndex( attachmentIndex_ )
- , sampleLocationsInfo( sampleLocationsInfo_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR AttachmentSampleLocationsEXT(uint32_t attachmentIndex_ = {}, VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {}) VULKAN_HPP_NOEXCEPT
+ : attachmentIndex( attachmentIndex_ ), sampleLocationsInfo( sampleLocationsInfo_ )
{}
+ VULKAN_HPP_CONSTEXPR AttachmentSampleLocationsEXT( AttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
AttachmentSampleLocationsEXT( VkAttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ AttachmentSampleLocationsEXT & operator=( VkAttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT const *>( &rhs );
+ return *this;
+ }
- AttachmentSampleLocationsEXT& operator=( VkAttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ AttachmentSampleLocationsEXT & operator=( AttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( AttachmentSampleLocationsEXT ) );
return *this;
}
@@ -21625,12 +18925,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- AttachmentSampleLocationsEXT & setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
+ AttachmentSampleLocationsEXT & setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
{
sampleLocationsInfo = sampleLocationsInfo_;
return *this;
}
+
operator VkAttachmentSampleLocationsEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAttachmentSampleLocationsEXT*>( this );
@@ -21641,6 +18942,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkAttachmentSampleLocationsEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( AttachmentSampleLocationsEXT const& ) const = default;
+#else
bool operator==( AttachmentSampleLocationsEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( attachmentIndex == rhs.attachmentIndex )
@@ -21651,27 +18956,44 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
uint32_t attachmentIndex = {};
VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {};
+
};
static_assert( sizeof( AttachmentSampleLocationsEXT ) == sizeof( VkAttachmentSampleLocationsEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<AttachmentSampleLocationsEXT>::value, "struct wrapper is not a standard layout!" );
struct BaseInStructure
{
- BaseInStructure() VULKAN_HPP_NOEXCEPT
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ BaseInStructure(VULKAN_HPP_NAMESPACE::StructureType sType_ = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo) VULKAN_HPP_NOEXCEPT
+ : sType( sType_ )
{}
+ BaseInStructure( BaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
BaseInStructure( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ BaseInStructure & operator=( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BaseInStructure const *>( &rhs );
+ return *this;
+ }
- BaseInStructure& operator=( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT
+ BaseInStructure & operator=( BaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BaseInStructure const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( BaseInStructure ) );
return *this;
}
@@ -21681,6 +19003,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkBaseInStructure const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBaseInStructure*>( this );
@@ -21691,6 +19014,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkBaseInStructure*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( BaseInStructure const& ) const = default;
+#else
bool operator==( BaseInStructure const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -21701,27 +19028,44 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo;
const struct VULKAN_HPP_NAMESPACE::BaseInStructure* pNext = {};
+
};
static_assert( sizeof( BaseInStructure ) == sizeof( VkBaseInStructure ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<BaseInStructure>::value, "struct wrapper is not a standard layout!" );
struct BaseOutStructure
{
- BaseOutStructure() VULKAN_HPP_NOEXCEPT
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ BaseOutStructure(VULKAN_HPP_NAMESPACE::StructureType sType_ = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo) VULKAN_HPP_NOEXCEPT
+ : sType( sType_ )
{}
+ BaseOutStructure( BaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
BaseOutStructure( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ BaseOutStructure & operator=( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BaseOutStructure const *>( &rhs );
+ return *this;
+ }
- BaseOutStructure& operator=( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT
+ BaseOutStructure & operator=( BaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BaseOutStructure const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( BaseOutStructure ) );
return *this;
}
@@ -21731,6 +19075,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkBaseOutStructure const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBaseOutStructure*>( this );
@@ -21741,6 +19086,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkBaseOutStructure*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( BaseOutStructure const& ) const = default;
+#else
bool operator==( BaseOutStructure const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -21751,92 +19100,215 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo;
struct VULKAN_HPP_NAMESPACE::BaseOutStructure* pNext = {};
+
};
static_assert( sizeof( BaseOutStructure ) == sizeof( VkBaseOutStructure ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<BaseOutStructure>::value, "struct wrapper is not a standard layout!" );
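
BaseInStructure and BaseOutStructure let generic code walk a pNext chain without knowing the concrete structure types; a hedged sketch, where the chain head `features2` is a hypothetical extensible output structure:

    vk::PhysicalDeviceFeatures2 features2;   // hypothetical chain head
    for ( auto * p = reinterpret_cast<vk::BaseOutStructure *>( &features2 );
          p != nullptr;
          p = p->pNext )
    {
      // p->sType identifies each structure in the chain
    }
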
- struct BindAccelerationStructureMemoryInfoNV
+ class DeviceMemory
{
- VULKAN_HPP_CONSTEXPR BindAccelerationStructureMemoryInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ = {},
- VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {},
- uint32_t deviceIndexCount_ = {},
- const uint32_t* pDeviceIndices_ = {} ) VULKAN_HPP_NOEXCEPT
- : accelerationStructure( accelerationStructure_ )
- , memory( memory_ )
- , memoryOffset( memoryOffset_ )
- , deviceIndexCount( deviceIndexCount_ )
- , pDeviceIndices( pDeviceIndices_ )
+ public:
+ using CType = VkDeviceMemory;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDeviceMemory;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDeviceMemory;
+
+ public:
+ VULKAN_HPP_CONSTEXPR DeviceMemory() VULKAN_HPP_NOEXCEPT
+ : m_deviceMemory(VK_NULL_HANDLE)
{}
- VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV & operator=( VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR DeviceMemory( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_deviceMemory(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT DeviceMemory( VkDeviceMemory deviceMemory ) VULKAN_HPP_NOEXCEPT
+ : m_deviceMemory( deviceMemory )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ DeviceMemory & operator=(VkDeviceMemory deviceMemory) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV ) - offsetof( BindAccelerationStructureMemoryInfoNV, pNext ) );
+ m_deviceMemory = deviceMemory;
return *this;
}
+#endif
- BindAccelerationStructureMemoryInfoNV( VkBindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ DeviceMemory & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ {
+ m_deviceMemory = VK_NULL_HANDLE;
+ return *this;
+ }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DeviceMemory const& ) const = default;
+#else
+ bool operator==( DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_deviceMemory == rhs.m_deviceMemory;
+ }
+
+ bool operator!=(DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_deviceMemory != rhs.m_deviceMemory;
+ }
+
+ bool operator<(DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_deviceMemory < rhs.m_deviceMemory;
+ }
+#endif
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDeviceMemory() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_deviceMemory;
+ }
+
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_deviceMemory != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_deviceMemory == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkDeviceMemory m_deviceMemory;
+ };
+ static_assert( sizeof( VULKAN_HPP_NAMESPACE::DeviceMemory ) == sizeof( VkDeviceMemory ), "handle and wrapper have different size!" );
+
+ template <>
+ struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eDeviceMemory>
+ {
+ using type = VULKAN_HPP_NAMESPACE::DeviceMemory;
+ };
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDeviceMemory>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::DeviceMemory;
+ };
+
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDeviceMemory>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::DeviceMemory;
+ };
+
+
+ template <>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DeviceMemory>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+ };
+
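
Handle wrappers such as DeviceMemory are thin value types around the underlying VkDeviceMemory; a sketch of the null-handle and comparison semantics, where `device` and `allocInfo` are assumed to exist:

    vk::DeviceMemory memory;                           // default-constructed == VK_NULL_HANDLE
    if ( !memory )                                     // boolean tests check for the null handle
    {
      memory = device.allocateMemory( allocInfo );     // 'device' and 'allocInfo' are assumptions
    }
    device.freeMemory( memory );
    memory = nullptr;                                  // reset back to the null handle
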
+ struct BindAccelerationStructureMemoryInfoKHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindAccelerationStructureMemoryInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR BindAccelerationStructureMemoryInfoKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, uint32_t deviceIndexCount_ = {}, const uint32_t* pDeviceIndices_ = {}) VULKAN_HPP_NOEXCEPT
+ : accelerationStructure( accelerationStructure_ ), memory( memory_ ), memoryOffset( memoryOffset_ ), deviceIndexCount( deviceIndexCount_ ), pDeviceIndices( pDeviceIndices_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR BindAccelerationStructureMemoryInfoKHR( BindAccelerationStructureMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ BindAccelerationStructureMemoryInfoKHR( VkBindAccelerationStructureMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- BindAccelerationStructureMemoryInfoNV& operator=( VkBindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ BindAccelerationStructureMemoryInfoKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_, VULKAN_HPP_NAMESPACE::DeviceMemory memory_, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_ )
+ : accelerationStructure( accelerationStructure_ ), memory( memory_ ), memoryOffset( memoryOffset_ ), deviceIndexCount( static_cast<uint32_t>( deviceIndices_.size() ) ), pDeviceIndices( deviceIndices_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ BindAccelerationStructureMemoryInfoKHR & operator=( VkBindAccelerationStructureMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoKHR const *>( &rhs );
+ return *this;
+ }
+
+ BindAccelerationStructureMemoryInfoKHR & operator=( BindAccelerationStructureMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( BindAccelerationStructureMemoryInfoKHR ) );
return *this;
}
- BindAccelerationStructureMemoryInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ BindAccelerationStructureMemoryInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- BindAccelerationStructureMemoryInfoNV & setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
+ BindAccelerationStructureMemoryInfoKHR & setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
{
accelerationStructure = accelerationStructure_;
return *this;
}
- BindAccelerationStructureMemoryInfoNV & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+ BindAccelerationStructureMemoryInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
{
memory = memory_;
return *this;
}
- BindAccelerationStructureMemoryInfoNV & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
+ BindAccelerationStructureMemoryInfoKHR & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
{
memoryOffset = memoryOffset_;
return *this;
}
- BindAccelerationStructureMemoryInfoNV & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT
+ BindAccelerationStructureMemoryInfoKHR & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT
{
deviceIndexCount = deviceIndexCount_;
return *this;
}
- BindAccelerationStructureMemoryInfoNV & setPDeviceIndices( const uint32_t* pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
+ BindAccelerationStructureMemoryInfoKHR & setPDeviceIndices( const uint32_t* pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
{
pDeviceIndices = pDeviceIndices_;
return *this;
}
- operator VkBindAccelerationStructureMemoryInfoNV const&() const VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ BindAccelerationStructureMemoryInfoKHR & setDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV*>( this );
+ deviceIndexCount = static_cast<uint32_t>( deviceIndices_.size() );
+ pDeviceIndices = deviceIndices_.data();
+ return *this;
}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
- operator VkBindAccelerationStructureMemoryInfoNV &() VULKAN_HPP_NOEXCEPT
+ operator VkBindAccelerationStructureMemoryInfoKHR const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkBindAccelerationStructureMemoryInfoNV*>( this );
+ return *reinterpret_cast<const VkBindAccelerationStructureMemoryInfoKHR*>( this );
}
- bool operator==( BindAccelerationStructureMemoryInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ operator VkBindAccelerationStructureMemoryInfoKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkBindAccelerationStructureMemoryInfoKHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( BindAccelerationStructureMemoryInfoKHR const& ) const = default;
+#else
+ bool operator==( BindAccelerationStructureMemoryInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
@@ -21847,45 +19319,67 @@ namespace VULKAN_HPP_NAMESPACE
&& ( pDeviceIndices == rhs.pDeviceIndices );
}
- bool operator!=( BindAccelerationStructureMemoryInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( BindAccelerationStructureMemoryInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindAccelerationStructureMemoryInfoNV;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindAccelerationStructureMemoryInfoKHR;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure = {};
+ VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure = {};
VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
uint32_t deviceIndexCount = {};
const uint32_t* pDeviceIndices = {};
+
+ };
+ static_assert( sizeof( BindAccelerationStructureMemoryInfoKHR ) == sizeof( VkBindAccelerationStructureMemoryInfoKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<BindAccelerationStructureMemoryInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eBindAccelerationStructureMemoryInfoKHR>
+ {
+ using Type = BindAccelerationStructureMemoryInfoKHR;
};
- static_assert( sizeof( BindAccelerationStructureMemoryInfoNV ) == sizeof( VkBindAccelerationStructureMemoryInfoNV ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<BindAccelerationStructureMemoryInfoNV>::value, "struct wrapper is not a standard layout!" );
+ using BindAccelerationStructureMemoryInfoNV = BindAccelerationStructureMemoryInfoKHR;
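
Because BindAccelerationStructureMemoryInfoNV is now an alias of the KHR struct, existing NV-suffixed code keeps compiling while picking up the KHR sType and setters; a sketch with assumed handles:

    static_assert( std::is_same<vk::BindAccelerationStructureMemoryInfoNV,
                                vk::BindAccelerationStructureMemoryInfoKHR>::value,
                   "NV name is an alias of the KHR struct" );
    vk::BindAccelerationStructureMemoryInfoNV bindInfo;
    bindInfo.setAccelerationStructure( accelerationStructure )   // assumed vk::AccelerationStructureKHR
            .setMemory( memory )                                 // assumed vk::DeviceMemory
            .setMemoryOffset( 0 );
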
struct BindBufferMemoryDeviceGroupInfo
{
- VULKAN_HPP_CONSTEXPR BindBufferMemoryDeviceGroupInfo( uint32_t deviceIndexCount_ = {},
- const uint32_t* pDeviceIndices_ = {} ) VULKAN_HPP_NOEXCEPT
- : deviceIndexCount( deviceIndexCount_ )
- , pDeviceIndices( pDeviceIndices_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindBufferMemoryDeviceGroupInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR BindBufferMemoryDeviceGroupInfo(uint32_t deviceIndexCount_ = {}, const uint32_t* pDeviceIndices_ = {}) VULKAN_HPP_NOEXCEPT
+ : deviceIndexCount( deviceIndexCount_ ), pDeviceIndices( pDeviceIndices_ )
{}
- VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo & operator=( VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo ) - offsetof( BindBufferMemoryDeviceGroupInfo, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR BindBufferMemoryDeviceGroupInfo( BindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindBufferMemoryDeviceGroupInfo( VkBindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- BindBufferMemoryDeviceGroupInfo& operator=( VkBindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ BindBufferMemoryDeviceGroupInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_ )
+ : deviceIndexCount( static_cast<uint32_t>( deviceIndices_.size() ) ), pDeviceIndices( deviceIndices_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ BindBufferMemoryDeviceGroupInfo & operator=( VkBindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo const *>( &rhs );
+ return *this;
+ }
+
+ BindBufferMemoryDeviceGroupInfo & operator=( BindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( BindBufferMemoryDeviceGroupInfo ) );
return *this;
}
@@ -21907,6 +19401,16 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ BindBufferMemoryDeviceGroupInfo & setDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT
+ {
+ deviceIndexCount = static_cast<uint32_t>( deviceIndices_.size() );
+ pDeviceIndices = deviceIndices_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
operator VkBindBufferMemoryDeviceGroupInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBindBufferMemoryDeviceGroupInfo*>( this );
@@ -21917,6 +19421,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkBindBufferMemoryDeviceGroupInfo*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( BindBufferMemoryDeviceGroupInfo const& ) const = default;
+#else
bool operator==( BindBufferMemoryDeviceGroupInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -21929,40 +19437,54 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindBufferMemoryDeviceGroupInfo;
const void* pNext = {};
uint32_t deviceIndexCount = {};
const uint32_t* pDeviceIndices = {};
+
};
static_assert( sizeof( BindBufferMemoryDeviceGroupInfo ) == sizeof( VkBindBufferMemoryDeviceGroupInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<BindBufferMemoryDeviceGroupInfo>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eBindBufferMemoryDeviceGroupInfo>
+ {
+ using Type = BindBufferMemoryDeviceGroupInfo;
+ };
+ using BindBufferMemoryDeviceGroupInfoKHR = BindBufferMemoryDeviceGroupInfo;
+
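// A minimal usage sketch (illustrative, not part of this patch; assumes the default
// `vk` namespace alias and a caller-owned vector): the enhanced-mode constructor and
// setDeviceIndices() added above fill the deviceIndexCount/pDeviceIndices pair from a
// single container. ArrayProxyNoTemporaries deliberately rejects rvalue containers, so
// the struct can never point at storage that dies at the end of the expression.
#include <vulkan/vulkan.hpp>
#include <vector>

vk::BindBufferMemoryDeviceGroupInfo makeDeviceGroupInfo( std::vector<uint32_t> const & deviceIndices )
{
  // deviceIndices must outlive the returned struct, which only stores a pointer into it
  return vk::BindBufferMemoryDeviceGroupInfo( deviceIndices );
}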
struct BindBufferMemoryInfo
{
- VULKAN_HPP_CONSTEXPR BindBufferMemoryInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
- VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {} ) VULKAN_HPP_NOEXCEPT
- : buffer( buffer_ )
- , memory( memory_ )
- , memoryOffset( memoryOffset_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindBufferMemoryInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR BindBufferMemoryInfo(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}) VULKAN_HPP_NOEXCEPT
+ : buffer( buffer_ ), memory( memory_ ), memoryOffset( memoryOffset_ )
{}
- VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo & operator=( VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo ) - offsetof( BindBufferMemoryInfo, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR BindBufferMemoryInfo( BindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindBufferMemoryInfo( VkBindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ BindBufferMemoryInfo & operator=( VkBindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo const *>( &rhs );
+ return *this;
+ }
- BindBufferMemoryInfo& operator=( VkBindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ BindBufferMemoryInfo & operator=( BindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( BindBufferMemoryInfo ) );
return *this;
}
@@ -21990,6 +19512,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkBindBufferMemoryInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBindBufferMemoryInfo*>( this );
@@ -22000,6 +19523,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkBindBufferMemoryInfo*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( BindBufferMemoryInfo const& ) const = default;
+#else
bool operator==( BindBufferMemoryInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -22013,6 +19540,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindBufferMemoryInfo;
@@ -22020,26 +19550,44 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Buffer buffer = {};
VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
+
};
static_assert( sizeof( BindBufferMemoryInfo ) == sizeof( VkBindBufferMemoryInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<BindBufferMemoryInfo>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eBindBufferMemoryInfo>
+ {
+ using Type = BindBufferMemoryInfo;
+ };
+ using BindBufferMemoryInfoKHR = BindBufferMemoryInfo;
+
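// A minimal sketch of the fluent-setter idiom these wrappers expose (illustrative;
// `buffer` and `memory` are assumed to be valid handles created elsewhere): each
// generated set*() returns *this, so a fully initialized struct can be built in one
// expression.
#include <vulkan/vulkan.hpp>

vk::BindBufferMemoryInfo makeBufferBindInfo( vk::Buffer buffer, vk::DeviceMemory memory )
{
  return vk::BindBufferMemoryInfo{}
    .setBuffer( buffer )
    .setMemory( memory )
    .setMemoryOffset( 0 );
}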
struct Offset2D
{
- VULKAN_HPP_CONSTEXPR Offset2D( int32_t x_ = {},
- int32_t y_ = {} ) VULKAN_HPP_NOEXCEPT
- : x( x_ )
- , y( y_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR Offset2D(int32_t x_ = {}, int32_t y_ = {}) VULKAN_HPP_NOEXCEPT
+ : x( x_ ), y( y_ )
{}
+ VULKAN_HPP_CONSTEXPR Offset2D( Offset2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
Offset2D( VkOffset2D const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ Offset2D & operator=( VkOffset2D const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Offset2D const *>( &rhs );
+ return *this;
+ }
- Offset2D& operator=( VkOffset2D const & rhs ) VULKAN_HPP_NOEXCEPT
+ Offset2D & operator=( Offset2D const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Offset2D const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( Offset2D ) );
return *this;
}
@@ -22055,6 +19603,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkOffset2D const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkOffset2D*>( this );
@@ -22065,6 +19614,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkOffset2D*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( Offset2D const& ) const = default;
+#else
bool operator==( Offset2D const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( x == rhs.x )
@@ -22075,45 +19628,60 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
int32_t x = {};
int32_t y = {};
+
};
static_assert( sizeof( Offset2D ) == sizeof( VkOffset2D ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<Offset2D>::value, "struct wrapper is not a standard layout!" );
struct Rect2D
{
- VULKAN_HPP_CONSTEXPR Rect2D( VULKAN_HPP_NAMESPACE::Offset2D offset_ = {},
- VULKAN_HPP_NAMESPACE::Extent2D extent_ = {} ) VULKAN_HPP_NOEXCEPT
- : offset( offset_ )
- , extent( extent_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR Rect2D(VULKAN_HPP_NAMESPACE::Offset2D offset_ = {}, VULKAN_HPP_NAMESPACE::Extent2D extent_ = {}) VULKAN_HPP_NOEXCEPT
+ : offset( offset_ ), extent( extent_ )
{}
+ VULKAN_HPP_CONSTEXPR Rect2D( Rect2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
Rect2D( VkRect2D const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ Rect2D & operator=( VkRect2D const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Rect2D const *>( &rhs );
+ return *this;
+ }
- Rect2D& operator=( VkRect2D const & rhs ) VULKAN_HPP_NOEXCEPT
+ Rect2D & operator=( Rect2D const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Rect2D const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( Rect2D ) );
return *this;
}
- Rect2D & setOffset( VULKAN_HPP_NAMESPACE::Offset2D offset_ ) VULKAN_HPP_NOEXCEPT
+ Rect2D & setOffset( VULKAN_HPP_NAMESPACE::Offset2D const & offset_ ) VULKAN_HPP_NOEXCEPT
{
offset = offset_;
return *this;
}
- Rect2D & setExtent( VULKAN_HPP_NAMESPACE::Extent2D extent_ ) VULKAN_HPP_NOEXCEPT
+ Rect2D & setExtent( VULKAN_HPP_NAMESPACE::Extent2D const & extent_ ) VULKAN_HPP_NOEXCEPT
{
extent = extent_;
return *this;
}
+
operator VkRect2D const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRect2D*>( this );
@@ -22124,6 +19692,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkRect2D*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( Rect2D const& ) const = default;
+#else
bool operator==( Rect2D const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( offset == rhs.offset )
@@ -22134,40 +19706,51 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
VULKAN_HPP_NAMESPACE::Offset2D offset = {};
VULKAN_HPP_NAMESPACE::Extent2D extent = {};
+
};
static_assert( sizeof( Rect2D ) == sizeof( VkRect2D ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<Rect2D>::value, "struct wrapper is not a standard layout!" );
struct BindImageMemoryDeviceGroupInfo
{
- VULKAN_HPP_CONSTEXPR BindImageMemoryDeviceGroupInfo( uint32_t deviceIndexCount_ = {},
- const uint32_t* pDeviceIndices_ = {},
- uint32_t splitInstanceBindRegionCount_ = {},
- const VULKAN_HPP_NAMESPACE::Rect2D* pSplitInstanceBindRegions_ = {} ) VULKAN_HPP_NOEXCEPT
- : deviceIndexCount( deviceIndexCount_ )
- , pDeviceIndices( pDeviceIndices_ )
- , splitInstanceBindRegionCount( splitInstanceBindRegionCount_ )
- , pSplitInstanceBindRegions( pSplitInstanceBindRegions_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemoryDeviceGroupInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR BindImageMemoryDeviceGroupInfo(uint32_t deviceIndexCount_ = {}, const uint32_t* pDeviceIndices_ = {}, uint32_t splitInstanceBindRegionCount_ = {}, const VULKAN_HPP_NAMESPACE::Rect2D* pSplitInstanceBindRegions_ = {}) VULKAN_HPP_NOEXCEPT
+ : deviceIndexCount( deviceIndexCount_ ), pDeviceIndices( pDeviceIndices_ ), splitInstanceBindRegionCount( splitInstanceBindRegionCount_ ), pSplitInstanceBindRegions( pSplitInstanceBindRegions_ )
{}
- VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo & operator=( VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo ) - offsetof( BindImageMemoryDeviceGroupInfo, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR BindImageMemoryDeviceGroupInfo( BindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindImageMemoryDeviceGroupInfo( VkBindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- BindImageMemoryDeviceGroupInfo& operator=( VkBindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ BindImageMemoryDeviceGroupInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & splitInstanceBindRegions_ = {} )
+ : deviceIndexCount( static_cast<uint32_t>( deviceIndices_.size() ) ), pDeviceIndices( deviceIndices_.data() ), splitInstanceBindRegionCount( static_cast<uint32_t>( splitInstanceBindRegions_.size() ) ), pSplitInstanceBindRegions( splitInstanceBindRegions_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ BindImageMemoryDeviceGroupInfo & operator=( VkBindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo const *>( &rhs );
+ return *this;
+ }
+
+ BindImageMemoryDeviceGroupInfo & operator=( BindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( BindImageMemoryDeviceGroupInfo ) );
return *this;
}
@@ -22189,6 +19772,15 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ BindImageMemoryDeviceGroupInfo & setDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT
+ {
+ deviceIndexCount = static_cast<uint32_t>( deviceIndices_.size() );
+ pDeviceIndices = deviceIndices_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
BindImageMemoryDeviceGroupInfo & setSplitInstanceBindRegionCount( uint32_t splitInstanceBindRegionCount_ ) VULKAN_HPP_NOEXCEPT
{
splitInstanceBindRegionCount = splitInstanceBindRegionCount_;
@@ -22201,6 +19793,16 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ BindImageMemoryDeviceGroupInfo & setSplitInstanceBindRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & splitInstanceBindRegions_ ) VULKAN_HPP_NOEXCEPT
+ {
+ splitInstanceBindRegionCount = static_cast<uint32_t>( splitInstanceBindRegions_.size() );
+ pSplitInstanceBindRegions = splitInstanceBindRegions_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
operator VkBindImageMemoryDeviceGroupInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBindImageMemoryDeviceGroupInfo*>( this );
@@ -22211,6 +19813,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkBindImageMemoryDeviceGroupInfo*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( BindImageMemoryDeviceGroupInfo const& ) const = default;
+#else
bool operator==( BindImageMemoryDeviceGroupInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -22225,6 +19831,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemoryDeviceGroupInfo;
@@ -22233,34 +19842,145 @@ namespace VULKAN_HPP_NAMESPACE
const uint32_t* pDeviceIndices = {};
uint32_t splitInstanceBindRegionCount = {};
const VULKAN_HPP_NAMESPACE::Rect2D* pSplitInstanceBindRegions = {};
+
};
static_assert( sizeof( BindImageMemoryDeviceGroupInfo ) == sizeof( VkBindImageMemoryDeviceGroupInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<BindImageMemoryDeviceGroupInfo>::value, "struct wrapper is not a standard layout!" );
- struct BindImageMemoryInfo
+ template <>
+ struct CppType<StructureType, StructureType::eBindImageMemoryDeviceGroupInfo>
+ {
+ using Type = BindImageMemoryDeviceGroupInfo;
+ };
+ using BindImageMemoryDeviceGroupInfoKHR = BindImageMemoryDeviceGroupInfo;
+
+ class Image
{
- VULKAN_HPP_CONSTEXPR BindImageMemoryInfo( VULKAN_HPP_NAMESPACE::Image image_ = {},
- VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {} ) VULKAN_HPP_NOEXCEPT
- : image( image_ )
- , memory( memory_ )
- , memoryOffset( memoryOffset_ )
+ public:
+ using CType = VkImage;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eImage;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImage;
+
+ public:
+ VULKAN_HPP_CONSTEXPR Image() VULKAN_HPP_NOEXCEPT
+ : m_image(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR Image( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_image(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT Image( VkImage image ) VULKAN_HPP_NOEXCEPT
+ : m_image( image )
{}
- VULKAN_HPP_NAMESPACE::BindImageMemoryInfo & operator=( VULKAN_HPP_NAMESPACE::BindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ Image & operator=(VkImage image) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BindImageMemoryInfo ) - offsetof( BindImageMemoryInfo, pNext ) );
+ m_image = image;
return *this;
}
+#endif
+
+ Image & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ {
+ m_image = VK_NULL_HANDLE;
+ return *this;
+ }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( Image const& ) const = default;
+#else
+ bool operator==( Image const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_image == rhs.m_image;
+ }
+
+ bool operator!=(Image const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_image != rhs.m_image;
+ }
+
+ bool operator<(Image const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_image < rhs.m_image;
+ }
+#endif
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImage() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_image;
+ }
+
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_image != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_image == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkImage m_image;
+ };
+ static_assert( sizeof( VULKAN_HPP_NAMESPACE::Image ) == sizeof( VkImage ), "handle and wrapper have different size!" );
+
+ template <>
+ struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eImage>
+ {
+ using type = VULKAN_HPP_NAMESPACE::Image;
+ };
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eImage>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::Image;
+ };
+
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImage>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::Image;
+ };
+
+
+ template <>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Image>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+ };
+
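// A minimal sketch of the handle-wrapper semantics (illustrative): vk::Image is a
// value type with the exact size of VkImage, default-constructed to VK_NULL_HANDLE,
// testable in boolean context, and convertible back to the C handle (the conversion is
// explicit when VULKAN_HPP_TYPESAFE_CONVERSION makes 64-bit handles distinct types).
#include <vulkan/vulkan.hpp>

bool isNull( vk::Image image )
{
  return !image;                          // compares against VK_NULL_HANDLE
}

VkImage toCHandle( vk::Image image )
{
  return static_cast<VkImage>( image );   // lossless round-trip to the C handle
}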
+ struct BindImageMemoryInfo
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemoryInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR BindImageMemoryInfo(VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}) VULKAN_HPP_NOEXCEPT
+ : image( image_ ), memory( memory_ ), memoryOffset( memoryOffset_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR BindImageMemoryInfo( BindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindImageMemoryInfo( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ BindImageMemoryInfo & operator=( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImageMemoryInfo const *>( &rhs );
+ return *this;
+ }
- BindImageMemoryInfo& operator=( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ BindImageMemoryInfo & operator=( BindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImageMemoryInfo const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( BindImageMemoryInfo ) );
return *this;
}
@@ -22288,6 +20008,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkBindImageMemoryInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBindImageMemoryInfo*>( this );
@@ -22298,6 +20019,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkBindImageMemoryInfo*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( BindImageMemoryInfo const& ) const = default;
+#else
bool operator==( BindImageMemoryInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -22311,6 +20036,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemoryInfo;
@@ -22318,32 +20046,45 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Image image = {};
VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
+
};
static_assert( sizeof( BindImageMemoryInfo ) == sizeof( VkBindImageMemoryInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<BindImageMemoryInfo>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eBindImageMemoryInfo>
+ {
+ using Type = BindImageMemoryInfo;
+ };
+ using BindImageMemoryInfoKHR = BindImageMemoryInfo;
+
struct BindImageMemorySwapchainInfoKHR
{
- VULKAN_HPP_CONSTEXPR BindImageMemorySwapchainInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {},
- uint32_t imageIndex_ = {} ) VULKAN_HPP_NOEXCEPT
- : swapchain( swapchain_ )
- , imageIndex( imageIndex_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemorySwapchainInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR BindImageMemorySwapchainInfoKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}, uint32_t imageIndex_ = {}) VULKAN_HPP_NOEXCEPT
+ : swapchain( swapchain_ ), imageIndex( imageIndex_ )
{}
- VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR & operator=( VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR ) - offsetof( BindImageMemorySwapchainInfoKHR, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR BindImageMemorySwapchainInfoKHR( BindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindImageMemorySwapchainInfoKHR( VkBindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- BindImageMemorySwapchainInfoKHR& operator=( VkBindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ BindImageMemorySwapchainInfoKHR & operator=( VkBindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR const *>( &rhs );
+ return *this;
+ }
+
+ BindImageMemorySwapchainInfoKHR & operator=( BindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( BindImageMemorySwapchainInfoKHR ) );
return *this;
}
@@ -22365,6 +20106,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkBindImageMemorySwapchainInfoKHR const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBindImageMemorySwapchainInfoKHR*>( this );
@@ -22375,6 +20117,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkBindImageMemorySwapchainInfoKHR*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( BindImageMemorySwapchainInfoKHR const& ) const = default;
+#else
bool operator==( BindImageMemorySwapchainInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -22387,36 +20133,53 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemorySwapchainInfoKHR;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {};
uint32_t imageIndex = {};
+
};
static_assert( sizeof( BindImageMemorySwapchainInfoKHR ) == sizeof( VkBindImageMemorySwapchainInfoKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<BindImageMemorySwapchainInfoKHR>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eBindImageMemorySwapchainInfoKHR>
+ {
+ using Type = BindImageMemorySwapchainInfoKHR;
+ };
+
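// A minimal sketch (illustrative) of what the new static members and CppType
// specializations provide: each sType-bearing wrapper exposes its StructureType at
// compile time, and CppType maps that enum value back to the wrapper type; the
// allowDuplicate flag is what the library consults when validating pNext chains.
#include <vulkan/vulkan.hpp>
#include <type_traits>

static_assert( vk::BindImageMemorySwapchainInfoKHR::structureType ==
                 vk::StructureType::eBindImageMemorySwapchainInfoKHR,
               "structureType mirrors the struct's sType" );
static_assert( std::is_same<vk::CppType<vk::StructureType,
                                         vk::StructureType::eBindImageMemorySwapchainInfoKHR>::Type,
                            vk::BindImageMemorySwapchainInfoKHR>::value,
               "CppType maps the enum value back to the wrapper" );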
struct BindImagePlaneMemoryInfo
{
- VULKAN_HPP_CONSTEXPR BindImagePlaneMemoryInfo( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor ) VULKAN_HPP_NOEXCEPT
- : planeAspect( planeAspect_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImagePlaneMemoryInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR BindImagePlaneMemoryInfo(VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor) VULKAN_HPP_NOEXCEPT
+ : planeAspect( planeAspect_ )
{}
- VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo & operator=( VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo ) - offsetof( BindImagePlaneMemoryInfo, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR BindImagePlaneMemoryInfo( BindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindImagePlaneMemoryInfo( VkBindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- BindImagePlaneMemoryInfo& operator=( VkBindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ BindImagePlaneMemoryInfo & operator=( VkBindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo const *>( &rhs );
+ return *this;
+ }
+
+ BindImagePlaneMemoryInfo & operator=( BindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( BindImagePlaneMemoryInfo ) );
return *this;
}
@@ -22432,6 +20195,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkBindImagePlaneMemoryInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBindImagePlaneMemoryInfo*>( this );
@@ -22442,6 +20206,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkBindImagePlaneMemoryInfo*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( BindImagePlaneMemoryInfo const& ) const = default;
+#else
bool operator==( BindImagePlaneMemoryInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -22453,37 +20221,208 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImagePlaneMemoryInfo;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor;
+
};
static_assert( sizeof( BindImagePlaneMemoryInfo ) == sizeof( VkBindImagePlaneMemoryInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<BindImagePlaneMemoryInfo>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eBindImagePlaneMemoryInfo>
+ {
+ using Type = BindImagePlaneMemoryInfo;
+ };
+ using BindImagePlaneMemoryInfoKHR = BindImagePlaneMemoryInfo;
+
+ struct BindIndexBufferIndirectCommandNV
+ {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR BindIndexBufferIndirectCommandNV(VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {}, uint32_t size_ = {}, VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16) VULKAN_HPP_NOEXCEPT
+ : bufferAddress( bufferAddress_ ), size( size_ ), indexType( indexType_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR BindIndexBufferIndirectCommandNV( BindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ BindIndexBufferIndirectCommandNV( VkBindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ BindIndexBufferIndirectCommandNV & operator=( VkBindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandNV const *>( &rhs );
+ return *this;
+ }
+
+ BindIndexBufferIndirectCommandNV & operator=( BindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( BindIndexBufferIndirectCommandNV ) );
+ return *this;
+ }
+
+ BindIndexBufferIndirectCommandNV & setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT
+ {
+ bufferAddress = bufferAddress_;
+ return *this;
+ }
+
+ BindIndexBufferIndirectCommandNV & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT
+ {
+ size = size_;
+ return *this;
+ }
+
+ BindIndexBufferIndirectCommandNV & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
+ {
+ indexType = indexType_;
+ return *this;
+ }
+
+
+ operator VkBindIndexBufferIndirectCommandNV const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkBindIndexBufferIndirectCommandNV*>( this );
+ }
+
+ operator VkBindIndexBufferIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkBindIndexBufferIndirectCommandNV*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( BindIndexBufferIndirectCommandNV const& ) const = default;
+#else
+ bool operator==( BindIndexBufferIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( bufferAddress == rhs.bufferAddress )
+ && ( size == rhs.size )
+ && ( indexType == rhs.indexType );
+ }
+
+ bool operator!=( BindIndexBufferIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress = {};
+ uint32_t size = {};
+ VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16;
+
+ };
+ static_assert( sizeof( BindIndexBufferIndirectCommandNV ) == sizeof( VkBindIndexBufferIndirectCommandNV ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<BindIndexBufferIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
+
+ struct BindShaderGroupIndirectCommandNV
+ {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR BindShaderGroupIndirectCommandNV(uint32_t groupIndex_ = {}) VULKAN_HPP_NOEXCEPT
+ : groupIndex( groupIndex_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR BindShaderGroupIndirectCommandNV( BindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ BindShaderGroupIndirectCommandNV( VkBindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ BindShaderGroupIndirectCommandNV & operator=( VkBindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindShaderGroupIndirectCommandNV const *>( &rhs );
+ return *this;
+ }
+
+ BindShaderGroupIndirectCommandNV & operator=( BindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( BindShaderGroupIndirectCommandNV ) );
+ return *this;
+ }
+
+ BindShaderGroupIndirectCommandNV & setGroupIndex( uint32_t groupIndex_ ) VULKAN_HPP_NOEXCEPT
+ {
+ groupIndex = groupIndex_;
+ return *this;
+ }
+
+
+ operator VkBindShaderGroupIndirectCommandNV const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkBindShaderGroupIndirectCommandNV*>( this );
+ }
+
+ operator VkBindShaderGroupIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkBindShaderGroupIndirectCommandNV*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( BindShaderGroupIndirectCommandNV const& ) const = default;
+#else
+ bool operator==( BindShaderGroupIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( groupIndex == rhs.groupIndex );
+ }
+
+ bool operator!=( BindShaderGroupIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ uint32_t groupIndex = {};
+
+ };
+ static_assert( sizeof( BindShaderGroupIndirectCommandNV ) == sizeof( VkBindShaderGroupIndirectCommandNV ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<BindShaderGroupIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
+
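// A minimal sketch (illustrative): unlike the *Info structs above, these NV
// device-generated-commands records carry no sType/pNext; they are plain payloads
// written into a tokenized indirect command stream. `bufferAddress` and
// `indexDataSize` are assumed inputs obtained elsewhere.
#include <vulkan/vulkan.hpp>

vk::BindIndexBufferIndirectCommandNV makeIndexToken( vk::DeviceAddress bufferAddress,
                                                     uint32_t indexDataSize )
{
  return vk::BindIndexBufferIndirectCommandNV{}
    .setBufferAddress( bufferAddress )
    .setSize( indexDataSize )
    .setIndexType( vk::IndexType::eUint32 );
}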
struct SparseMemoryBind
{
- VULKAN_HPP_CONSTEXPR SparseMemoryBind( VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize size_ = {},
- VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {},
- VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
- : resourceOffset( resourceOffset_ )
- , size( size_ )
- , memory( memory_ )
- , memoryOffset( memoryOffset_ )
- , flags( flags_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SparseMemoryBind(VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = {}) VULKAN_HPP_NOEXCEPT
+ : resourceOffset( resourceOffset_ ), size( size_ ), memory( memory_ ), memoryOffset( memoryOffset_ ), flags( flags_ )
{}
+ VULKAN_HPP_CONSTEXPR SparseMemoryBind( SparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
SparseMemoryBind( VkSparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- SparseMemoryBind& operator=( VkSparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
+ SparseMemoryBind & operator=( VkSparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseMemoryBind const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseMemoryBind const *>( &rhs );
+ return *this;
+ }
+
+ SparseMemoryBind & operator=( SparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( SparseMemoryBind ) );
return *this;
}
@@ -22517,6 +20456,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkSparseMemoryBind const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSparseMemoryBind*>( this );
@@ -22527,6 +20467,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkSparseMemoryBind*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SparseMemoryBind const& ) const = default;
+#else
bool operator==( SparseMemoryBind const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( resourceOffset == rhs.resourceOffset )
@@ -22540,6 +20484,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset = {};
@@ -22547,28 +20494,43 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags = {};
+
};
static_assert( sizeof( SparseMemoryBind ) == sizeof( VkSparseMemoryBind ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SparseMemoryBind>::value, "struct wrapper is not a standard layout!" );
struct SparseBufferMemoryBindInfo
{
- VULKAN_HPP_CONSTEXPR SparseBufferMemoryBindInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
- uint32_t bindCount_ = {},
- const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds_ = {} ) VULKAN_HPP_NOEXCEPT
- : buffer( buffer_ )
- , bindCount( bindCount_ )
- , pBinds( pBinds_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SparseBufferMemoryBindInfo(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, uint32_t bindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds_ = {}) VULKAN_HPP_NOEXCEPT
+ : buffer( buffer_ ), bindCount( bindCount_ ), pBinds( pBinds_ )
{}
+ VULKAN_HPP_CONSTEXPR SparseBufferMemoryBindInfo( SparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
SparseBufferMemoryBindInfo( VkSparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- SparseBufferMemoryBindInfo& operator=( VkSparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ SparseBufferMemoryBindInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseMemoryBind> const & binds_ )
+ : buffer( buffer_ ), bindCount( static_cast<uint32_t>( binds_.size() ) ), pBinds( binds_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ SparseBufferMemoryBindInfo & operator=( VkSparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo const *>( &rhs );
+ return *this;
+ }
+
+ SparseBufferMemoryBindInfo & operator=( SparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( SparseBufferMemoryBindInfo ) );
return *this;
}
@@ -22590,6 +20552,16 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ SparseBufferMemoryBindInfo & setBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseMemoryBind> const & binds_ ) VULKAN_HPP_NOEXCEPT
+ {
+ bindCount = static_cast<uint32_t>( binds_.size() );
+ pBinds = binds_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
operator VkSparseBufferMemoryBindInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSparseBufferMemoryBindInfo*>( this );
@@ -22600,6 +20572,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkSparseBufferMemoryBindInfo*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SparseBufferMemoryBindInfo const& ) const = default;
+#else
bool operator==( SparseBufferMemoryBindInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( buffer == rhs.buffer )
@@ -22611,33 +20587,51 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
VULKAN_HPP_NAMESPACE::Buffer buffer = {};
uint32_t bindCount = {};
const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds = {};
+
};
static_assert( sizeof( SparseBufferMemoryBindInfo ) == sizeof( VkSparseBufferMemoryBindInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SparseBufferMemoryBindInfo>::value, "struct wrapper is not a standard layout!" );
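// A minimal sketch of the enhanced-mode constructor pattern for the sparse-binding
// wrappers (illustrative; `buffer` and the caller-owned `binds` vector are assumed to
// exist): the bindCount/pBinds pair is derived from the container in one step.
#include <vulkan/vulkan.hpp>
#include <vector>

vk::SparseBufferMemoryBindInfo makeSparseBindInfo( vk::Buffer buffer,
                                                   std::vector<vk::SparseMemoryBind> const & binds )
{
  return vk::SparseBufferMemoryBindInfo( buffer, binds );
}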
struct SparseImageOpaqueMemoryBindInfo
{
- VULKAN_HPP_CONSTEXPR SparseImageOpaqueMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_ = {},
- uint32_t bindCount_ = {},
- const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds_ = {} ) VULKAN_HPP_NOEXCEPT
- : image( image_ )
- , bindCount( bindCount_ )
- , pBinds( pBinds_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SparseImageOpaqueMemoryBindInfo(VULKAN_HPP_NAMESPACE::Image image_ = {}, uint32_t bindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds_ = {}) VULKAN_HPP_NOEXCEPT
+ : image( image_ ), bindCount( bindCount_ ), pBinds( pBinds_ )
{}
+ VULKAN_HPP_CONSTEXPR SparseImageOpaqueMemoryBindInfo( SparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
SparseImageOpaqueMemoryBindInfo( VkSparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- SparseImageOpaqueMemoryBindInfo& operator=( VkSparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ SparseImageOpaqueMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseMemoryBind> const & binds_ )
+ : image( image_ ), bindCount( static_cast<uint32_t>( binds_.size() ) ), pBinds( binds_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ SparseImageOpaqueMemoryBindInfo & operator=( VkSparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo const *>( &rhs );
+ return *this;
+ }
+
+ SparseImageOpaqueMemoryBindInfo & operator=( SparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( SparseImageOpaqueMemoryBindInfo ) );
return *this;
}
@@ -22659,6 +20653,16 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ SparseImageOpaqueMemoryBindInfo & setBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseMemoryBind> const & binds_ ) VULKAN_HPP_NOEXCEPT
+ {
+ bindCount = static_cast<uint32_t>( binds_.size() );
+ pBinds = binds_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
operator VkSparseImageOpaqueMemoryBindInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSparseImageOpaqueMemoryBindInfo*>( this );
@@ -22669,6 +20673,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkSparseImageOpaqueMemoryBindInfo*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SparseImageOpaqueMemoryBindInfo const& ) const = default;
+#else
bool operator==( SparseImageOpaqueMemoryBindInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( image == rhs.image )
@@ -22680,33 +20688,45 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
VULKAN_HPP_NAMESPACE::Image image = {};
uint32_t bindCount = {};
const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds = {};
+
};
static_assert( sizeof( SparseImageOpaqueMemoryBindInfo ) == sizeof( VkSparseImageOpaqueMemoryBindInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SparseImageOpaqueMemoryBindInfo>::value, "struct wrapper is not a standard layout!" );
struct ImageSubresource
{
- VULKAN_HPP_CONSTEXPR ImageSubresource( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {},
- uint32_t mipLevel_ = {},
- uint32_t arrayLayer_ = {} ) VULKAN_HPP_NOEXCEPT
- : aspectMask( aspectMask_ )
- , mipLevel( mipLevel_ )
- , arrayLayer( arrayLayer_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ImageSubresource(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, uint32_t mipLevel_ = {}, uint32_t arrayLayer_ = {}) VULKAN_HPP_NOEXCEPT
+ : aspectMask( aspectMask_ ), mipLevel( mipLevel_ ), arrayLayer( arrayLayer_ )
{}
+ VULKAN_HPP_CONSTEXPR ImageSubresource( ImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
ImageSubresource( VkImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ ImageSubresource & operator=( VkImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresource const *>( &rhs );
+ return *this;
+ }
- ImageSubresource& operator=( VkImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT
+ ImageSubresource & operator=( ImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresource const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageSubresource ) );
return *this;
}
@@ -22728,6 +20748,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkImageSubresource const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageSubresource*>( this );
@@ -22738,6 +20759,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkImageSubresource*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ImageSubresource const& ) const = default;
+#else
bool operator==( ImageSubresource const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( aspectMask == rhs.aspectMask )
@@ -22749,40 +20774,51 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
uint32_t mipLevel = {};
uint32_t arrayLayer = {};
+
};
static_assert( sizeof( ImageSubresource ) == sizeof( VkImageSubresource ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ImageSubresource>::value, "struct wrapper is not a standard layout!" );
struct Offset3D
{
- VULKAN_HPP_CONSTEXPR Offset3D( int32_t x_ = {},
- int32_t y_ = {},
- int32_t z_ = {} ) VULKAN_HPP_NOEXCEPT
- : x( x_ )
- , y( y_ )
- , z( z_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR Offset3D(int32_t x_ = {}, int32_t y_ = {}, int32_t z_ = {}) VULKAN_HPP_NOEXCEPT
+ : x( x_ ), y( y_ ), z( z_ )
{}
- explicit Offset3D( Offset2D const& offset2D,
- int32_t z_ = {} )
+ VULKAN_HPP_CONSTEXPR Offset3D( Offset3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ Offset3D( VkOffset3D const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+
+ explicit Offset3D( Offset2D const& offset2D, int32_t z_ = {} )
: x( offset2D.x )
, y( offset2D.y )
, z( z_ )
{}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- Offset3D( VkOffset3D const & rhs ) VULKAN_HPP_NOEXCEPT
+ Offset3D & operator=( VkOffset3D const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Offset3D const *>( &rhs );
+ return *this;
}
- Offset3D& operator=( VkOffset3D const & rhs ) VULKAN_HPP_NOEXCEPT
+ Offset3D & operator=( Offset3D const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Offset3D const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( Offset3D ) );
return *this;
}
@@ -22804,6 +20840,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkOffset3D const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkOffset3D*>( this );
@@ -22814,6 +20851,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkOffset3D*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( Offset3D const& ) const = default;
+#else
bool operator==( Offset3D const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( x == rhs.x )
@@ -22825,40 +20866,51 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
int32_t x = {};
int32_t y = {};
int32_t z = {};
+
};
static_assert( sizeof( Offset3D ) == sizeof( VkOffset3D ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<Offset3D>::value, "struct wrapper is not a standard layout!" );
struct Extent3D
{
- VULKAN_HPP_CONSTEXPR Extent3D( uint32_t width_ = {},
- uint32_t height_ = {},
- uint32_t depth_ = {} ) VULKAN_HPP_NOEXCEPT
- : width( width_ )
- , height( height_ )
- , depth( depth_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR Extent3D(uint32_t width_ = {}, uint32_t height_ = {}, uint32_t depth_ = {}) VULKAN_HPP_NOEXCEPT
+ : width( width_ ), height( height_ ), depth( depth_ )
{}
- explicit Extent3D( Extent2D const& extent2D,
- uint32_t depth_ = {} )
+ VULKAN_HPP_CONSTEXPR Extent3D( Extent3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ Extent3D( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+
+ explicit Extent3D( Extent2D const& extent2D, uint32_t depth_ = {} )
: width( extent2D.width )
, height( extent2D.height )
, depth( depth_ )
{}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- Extent3D( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT
+ Extent3D & operator=( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Extent3D const *>( &rhs );
+ return *this;
}
- Extent3D& operator=( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT
+ Extent3D & operator=( Extent3D const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Extent3D const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( Extent3D ) );
return *this;
}
@@ -22880,6 +20932,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkExtent3D const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExtent3D*>( this );
@@ -22890,6 +20943,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkExtent3D*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( Extent3D const& ) const = default;
+#else
bool operator==( Extent3D const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( width == rhs.width )
@@ -22901,55 +20958,61 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
uint32_t width = {};
uint32_t height = {};
uint32_t depth = {};
+
};
static_assert( sizeof( Extent3D ) == sizeof( VkExtent3D ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<Extent3D>::value, "struct wrapper is not a standard layout!" );
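// A minimal sketch (illustrative) of the explicit promotion constructors retained in
// this rework: a 2D offset/extent can be widened to its 3D counterpart with the extra
// coordinate supplied explicitly (or left value-initialized).
#include <vulkan/vulkan.hpp>

vk::Extent3D toExtent3D( vk::Extent2D const & extent2D )
{
  return vk::Extent3D( extent2D, 1 );   // width/height copied from extent2D, depth = 1
}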
struct SparseImageMemoryBind
{
- VULKAN_HPP_CONSTEXPR SparseImageMemoryBind( VULKAN_HPP_NAMESPACE::ImageSubresource subresource_ = {},
- VULKAN_HPP_NAMESPACE::Offset3D offset_ = {},
- VULKAN_HPP_NAMESPACE::Extent3D extent_ = {},
- VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {},
- VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
- : subresource( subresource_ )
- , offset( offset_ )
- , extent( extent_ )
- , memory( memory_ )
- , memoryOffset( memoryOffset_ )
- , flags( flags_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SparseImageMemoryBind(VULKAN_HPP_NAMESPACE::ImageSubresource subresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D offset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = {}) VULKAN_HPP_NOEXCEPT
+ : subresource( subresource_ ), offset( offset_ ), extent( extent_ ), memory( memory_ ), memoryOffset( memoryOffset_ ), flags( flags_ )
{}
+ VULKAN_HPP_CONSTEXPR SparseImageMemoryBind( SparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
SparseImageMemoryBind( VkSparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- SparseImageMemoryBind& operator=( VkSparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
+ SparseImageMemoryBind & operator=( VkSparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageMemoryBind const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageMemoryBind const *>( &rhs );
return *this;
}
- SparseImageMemoryBind & setSubresource( VULKAN_HPP_NAMESPACE::ImageSubresource subresource_ ) VULKAN_HPP_NOEXCEPT
+ SparseImageMemoryBind & operator=( SparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( SparseImageMemoryBind ) );
+ return *this;
+ }
+
+ SparseImageMemoryBind & setSubresource( VULKAN_HPP_NAMESPACE::ImageSubresource const & subresource_ ) VULKAN_HPP_NOEXCEPT
{
subresource = subresource_;
return *this;
}
- SparseImageMemoryBind & setOffset( VULKAN_HPP_NAMESPACE::Offset3D offset_ ) VULKAN_HPP_NOEXCEPT
+ SparseImageMemoryBind & setOffset( VULKAN_HPP_NAMESPACE::Offset3D const & offset_ ) VULKAN_HPP_NOEXCEPT
{
offset = offset_;
return *this;
}
- SparseImageMemoryBind & setExtent( VULKAN_HPP_NAMESPACE::Extent3D extent_ ) VULKAN_HPP_NOEXCEPT
+ SparseImageMemoryBind & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
{
extent = extent_;
return *this;
@@ -22973,6 +21036,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkSparseImageMemoryBind const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSparseImageMemoryBind*>( this );
@@ -22983,6 +21047,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkSparseImageMemoryBind*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SparseImageMemoryBind const& ) const = default;
+#else
bool operator==( SparseImageMemoryBind const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( subresource == rhs.subresource )
@@ -22997,6 +21065,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
VULKAN_HPP_NAMESPACE::ImageSubresource subresource = {};
@@ -23005,28 +21076,43 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags = {};
+
};
static_assert( sizeof( SparseImageMemoryBind ) == sizeof( VkSparseImageMemoryBind ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SparseImageMemoryBind>::value, "struct wrapper is not a standard layout!" );
struct SparseImageMemoryBindInfo
{
- VULKAN_HPP_CONSTEXPR SparseImageMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_ = {},
- uint32_t bindCount_ = {},
- const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind* pBinds_ = {} ) VULKAN_HPP_NOEXCEPT
- : image( image_ )
- , bindCount( bindCount_ )
- , pBinds( pBinds_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SparseImageMemoryBindInfo(VULKAN_HPP_NAMESPACE::Image image_ = {}, uint32_t bindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind* pBinds_ = {}) VULKAN_HPP_NOEXCEPT
+ : image( image_ ), bindCount( bindCount_ ), pBinds( pBinds_ )
{}
+ VULKAN_HPP_CONSTEXPR SparseImageMemoryBindInfo( SparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
SparseImageMemoryBindInfo( VkSparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- SparseImageMemoryBindInfo& operator=( VkSparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ SparseImageMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind> const & binds_ )
+ : image( image_ ), bindCount( static_cast<uint32_t>( binds_.size() ) ), pBinds( binds_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ SparseImageMemoryBindInfo & operator=( VkSparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo const *>( &rhs );
+ return *this;
+ }
+
+ SparseImageMemoryBindInfo & operator=( SparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( SparseImageMemoryBindInfo ) );
return *this;
}
@@ -23048,6 +21134,16 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ SparseImageMemoryBindInfo & setBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind> const & binds_ ) VULKAN_HPP_NOEXCEPT
+ {
+ bindCount = static_cast<uint32_t>( binds_.size() );
+ pBinds = binds_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
operator VkSparseImageMemoryBindInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSparseImageMemoryBindInfo*>( this );
@@ -23058,6 +21154,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkSparseImageMemoryBindInfo*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SparseImageMemoryBindInfo const& ) const = default;
+#else
bool operator==( SparseImageMemoryBindInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( image == rhs.image )
@@ -23069,53 +21169,52 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
VULKAN_HPP_NAMESPACE::Image image = {};
uint32_t bindCount = {};
const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind* pBinds = {};
+
};
static_assert( sizeof( SparseImageMemoryBindInfo ) == sizeof( VkSparseImageMemoryBindInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SparseImageMemoryBindInfo>::value, "struct wrapper is not a standard layout!" );
struct BindSparseInfo
{
- VULKAN_HPP_CONSTEXPR BindSparseInfo( uint32_t waitSemaphoreCount_ = {},
- const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ = {},
- uint32_t bufferBindCount_ = {},
- const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo* pBufferBinds_ = {},
- uint32_t imageOpaqueBindCount_ = {},
- const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds_ = {},
- uint32_t imageBindCount_ = {},
- const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo* pImageBinds_ = {},
- uint32_t signalSemaphoreCount_ = {},
- const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores_ = {} ) VULKAN_HPP_NOEXCEPT
- : waitSemaphoreCount( waitSemaphoreCount_ )
- , pWaitSemaphores( pWaitSemaphores_ )
- , bufferBindCount( bufferBindCount_ )
- , pBufferBinds( pBufferBinds_ )
- , imageOpaqueBindCount( imageOpaqueBindCount_ )
- , pImageOpaqueBinds( pImageOpaqueBinds_ )
- , imageBindCount( imageBindCount_ )
- , pImageBinds( pImageBinds_ )
- , signalSemaphoreCount( signalSemaphoreCount_ )
- , pSignalSemaphores( pSignalSemaphores_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindSparseInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR BindSparseInfo(uint32_t waitSemaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ = {}, uint32_t bufferBindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo* pBufferBinds_ = {}, uint32_t imageOpaqueBindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds_ = {}, uint32_t imageBindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo* pImageBinds_ = {}, uint32_t signalSemaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores_ = {}) VULKAN_HPP_NOEXCEPT
+ : waitSemaphoreCount( waitSemaphoreCount_ ), pWaitSemaphores( pWaitSemaphores_ ), bufferBindCount( bufferBindCount_ ), pBufferBinds( pBufferBinds_ ), imageOpaqueBindCount( imageOpaqueBindCount_ ), pImageOpaqueBinds( pImageOpaqueBinds_ ), imageBindCount( imageBindCount_ ), pImageBinds( pImageBinds_ ), signalSemaphoreCount( signalSemaphoreCount_ ), pSignalSemaphores( pSignalSemaphores_ )
{}
- VULKAN_HPP_NAMESPACE::BindSparseInfo & operator=( VULKAN_HPP_NAMESPACE::BindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BindSparseInfo ) - offsetof( BindSparseInfo, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR BindSparseInfo( BindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindSparseInfo( VkBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- BindSparseInfo& operator=( VkBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ BindSparseInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo> const & bufferBinds_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo> const & imageOpaqueBinds_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo> const & imageBinds_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & signalSemaphores_ = {} )
+ : waitSemaphoreCount( static_cast<uint32_t>( waitSemaphores_.size() ) ), pWaitSemaphores( waitSemaphores_.data() ), bufferBindCount( static_cast<uint32_t>( bufferBinds_.size() ) ), pBufferBinds( bufferBinds_.data() ), imageOpaqueBindCount( static_cast<uint32_t>( imageOpaqueBinds_.size() ) ), pImageOpaqueBinds( imageOpaqueBinds_.data() ), imageBindCount( static_cast<uint32_t>( imageBinds_.size() ) ), pImageBinds( imageBinds_.data() ), signalSemaphoreCount( static_cast<uint32_t>( signalSemaphores_.size() ) ), pSignalSemaphores( signalSemaphores_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ BindSparseInfo & operator=( VkBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindSparseInfo const *>( &rhs );
+ return *this;
+ }
+
+ BindSparseInfo & operator=( BindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindSparseInfo const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( BindSparseInfo ) );
return *this;
}
@@ -23137,6 +21236,15 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ BindSparseInfo & setWaitSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_ ) VULKAN_HPP_NOEXCEPT
+ {
+ waitSemaphoreCount = static_cast<uint32_t>( waitSemaphores_.size() );
+ pWaitSemaphores = waitSemaphores_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
BindSparseInfo & setBufferBindCount( uint32_t bufferBindCount_ ) VULKAN_HPP_NOEXCEPT
{
bufferBindCount = bufferBindCount_;
@@ -23149,6 +21257,15 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ BindSparseInfo & setBufferBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo> const & bufferBinds_ ) VULKAN_HPP_NOEXCEPT
+ {
+ bufferBindCount = static_cast<uint32_t>( bufferBinds_.size() );
+ pBufferBinds = bufferBinds_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
BindSparseInfo & setImageOpaqueBindCount( uint32_t imageOpaqueBindCount_ ) VULKAN_HPP_NOEXCEPT
{
imageOpaqueBindCount = imageOpaqueBindCount_;
@@ -23161,6 +21278,15 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ BindSparseInfo & setImageOpaqueBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo> const & imageOpaqueBinds_ ) VULKAN_HPP_NOEXCEPT
+ {
+ imageOpaqueBindCount = static_cast<uint32_t>( imageOpaqueBinds_.size() );
+ pImageOpaqueBinds = imageOpaqueBinds_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
BindSparseInfo & setImageBindCount( uint32_t imageBindCount_ ) VULKAN_HPP_NOEXCEPT
{
imageBindCount = imageBindCount_;
@@ -23173,6 +21299,15 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ BindSparseInfo & setImageBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo> const & imageBinds_ ) VULKAN_HPP_NOEXCEPT
+ {
+ imageBindCount = static_cast<uint32_t>( imageBinds_.size() );
+ pImageBinds = imageBinds_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
BindSparseInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
{
signalSemaphoreCount = signalSemaphoreCount_;
@@ -23185,6 +21320,16 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ BindSparseInfo & setSignalSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & signalSemaphores_ ) VULKAN_HPP_NOEXCEPT
+ {
+ signalSemaphoreCount = static_cast<uint32_t>( signalSemaphores_.size() );
+ pSignalSemaphores = signalSemaphores_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
operator VkBindSparseInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBindSparseInfo*>( this );
@@ -23195,6 +21340,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkBindSparseInfo*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( BindSparseInfo const& ) const = default;
+#else
bool operator==( BindSparseInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -23215,6 +21364,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindSparseInfo;
@@ -23229,28 +21381,484 @@ namespace VULKAN_HPP_NAMESPACE
const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo* pImageBinds = {};
uint32_t signalSemaphoreCount = {};
const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores = {};
+
};
static_assert( sizeof( BindSparseInfo ) == sizeof( VkBindSparseInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<BindSparseInfo>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eBindSparseInfo>
+ {
+ using Type = BindSparseInfo;
+ };
+
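// Editorial note: a minimal usage sketch, not part of the generated header, showing the
// enhanced-mode ArrayProxyNoTemporaries overloads added above. The vectors and the helper
// name are hypothetical placeholders; the count/pointer pairs are derived automatically.
#include <vulkan/vulkan.hpp>
#include <vector>

vk::BindSparseInfo makeBindSparseInfo( std::vector<vk::Semaphore> const &                  waitSemaphores,
                                       std::vector<vk::SparseBufferMemoryBindInfo> const & bufferBinds,
                                       std::vector<vk::Semaphore> const &                  signalSemaphores )
{
  // waitSemaphoreCount/pWaitSemaphores (and the other count/pointer pairs) are filled in
  // from the containers, so no manual size bookkeeping is needed.
  return vk::BindSparseInfo( waitSemaphores, bufferBinds, {}, {}, signalSemaphores );
}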
+ struct BindVertexBufferIndirectCommandNV
+ {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR BindVertexBufferIndirectCommandNV(VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {}, uint32_t size_ = {}, uint32_t stride_ = {}) VULKAN_HPP_NOEXCEPT
+ : bufferAddress( bufferAddress_ ), size( size_ ), stride( stride_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR BindVertexBufferIndirectCommandNV( BindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ BindVertexBufferIndirectCommandNV( VkBindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ BindVertexBufferIndirectCommandNV & operator=( VkBindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandNV const *>( &rhs );
+ return *this;
+ }
+
+ BindVertexBufferIndirectCommandNV & operator=( BindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( BindVertexBufferIndirectCommandNV ) );
+ return *this;
+ }
+
+ BindVertexBufferIndirectCommandNV & setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT
+ {
+ bufferAddress = bufferAddress_;
+ return *this;
+ }
+
+ BindVertexBufferIndirectCommandNV & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT
+ {
+ size = size_;
+ return *this;
+ }
+
+ BindVertexBufferIndirectCommandNV & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT
+ {
+ stride = stride_;
+ return *this;
+ }
+
+
+ operator VkBindVertexBufferIndirectCommandNV const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkBindVertexBufferIndirectCommandNV*>( this );
+ }
+
+ operator VkBindVertexBufferIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkBindVertexBufferIndirectCommandNV*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( BindVertexBufferIndirectCommandNV const& ) const = default;
+#else
+ bool operator==( BindVertexBufferIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( bufferAddress == rhs.bufferAddress )
+ && ( size == rhs.size )
+ && ( stride == rhs.stride );
+ }
+
+ bool operator!=( BindVertexBufferIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress = {};
+ uint32_t size = {};
+ uint32_t stride = {};
+
+ };
+ static_assert( sizeof( BindVertexBufferIndirectCommandNV ) == sizeof( VkBindVertexBufferIndirectCommandNV ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<BindVertexBufferIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
+
+ struct ImageSubresourceLayers
+ {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ImageSubresourceLayers(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, uint32_t mipLevel_ = {}, uint32_t baseArrayLayer_ = {}, uint32_t layerCount_ = {}) VULKAN_HPP_NOEXCEPT
+ : aspectMask( aspectMask_ ), mipLevel( mipLevel_ ), baseArrayLayer( baseArrayLayer_ ), layerCount( layerCount_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR ImageSubresourceLayers( ImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImageSubresourceLayers( VkImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ ImageSubresourceLayers & operator=( VkImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const *>( &rhs );
+ return *this;
+ }
+
+ ImageSubresourceLayers & operator=( ImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageSubresourceLayers ) );
+ return *this;
+ }
+
+ ImageSubresourceLayers & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
+ {
+ aspectMask = aspectMask_;
+ return *this;
+ }
+
+ ImageSubresourceLayers & setMipLevel( uint32_t mipLevel_ ) VULKAN_HPP_NOEXCEPT
+ {
+ mipLevel = mipLevel_;
+ return *this;
+ }
+
+ ImageSubresourceLayers & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT
+ {
+ baseArrayLayer = baseArrayLayer_;
+ return *this;
+ }
+
+ ImageSubresourceLayers & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ layerCount = layerCount_;
+ return *this;
+ }
+
+
+ operator VkImageSubresourceLayers const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkImageSubresourceLayers*>( this );
+ }
+
+ operator VkImageSubresourceLayers &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkImageSubresourceLayers*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ImageSubresourceLayers const& ) const = default;
+#else
+ bool operator==( ImageSubresourceLayers const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( aspectMask == rhs.aspectMask )
+ && ( mipLevel == rhs.mipLevel )
+ && ( baseArrayLayer == rhs.baseArrayLayer )
+ && ( layerCount == rhs.layerCount );
+ }
+
+ bool operator!=( ImageSubresourceLayers const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
+ uint32_t mipLevel = {};
+ uint32_t baseArrayLayer = {};
+ uint32_t layerCount = {};
+
+ };
+ static_assert( sizeof( ImageSubresourceLayers ) == sizeof( VkImageSubresourceLayers ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ImageSubresourceLayers>::value, "struct wrapper is not a standard layout!" );
+
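// Editorial note: illustrative only. The wrappers above default operator<=> when
// VULKAN_HPP_HAS_SPACESHIP_OPERATOR is defined and otherwise fall back to the member-wise
// operator==/operator!=, so value comparison works either way.
#include <vulkan/vulkan.hpp>

bool sameSubresource( vk::ImageSubresourceLayers const & a, vk::ImageSubresourceLayers const & b )
{
  // Member-wise equality: aspectMask, mipLevel, baseArrayLayer and layerCount must all match.
  return a == b;
}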
+ struct ImageBlit2KHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageBlit2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 ImageBlit2KHR(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const& srcOffsets_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const& dstOffsets_ = {}) VULKAN_HPP_NOEXCEPT
+ : srcSubresource( srcSubresource_ ), srcOffsets( srcOffsets_ ), dstSubresource( dstSubresource_ ), dstOffsets( dstOffsets_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR_14 ImageBlit2KHR( ImageBlit2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImageBlit2KHR( VkImageBlit2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ ImageBlit2KHR & operator=( VkImageBlit2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageBlit2KHR const *>( &rhs );
+ return *this;
+ }
+
+ ImageBlit2KHR & operator=( ImageBlit2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageBlit2KHR ) );
+ return *this;
+ }
+
+ ImageBlit2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ ImageBlit2KHR & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcSubresource = srcSubresource_;
+ return *this;
+ }
+
+ ImageBlit2KHR & setSrcOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const & srcOffsets_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcOffsets = srcOffsets_;
+ return *this;
+ }
+
+ ImageBlit2KHR & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstSubresource = dstSubresource_;
+ return *this;
+ }
+
+ ImageBlit2KHR & setDstOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const & dstOffsets_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstOffsets = dstOffsets_;
+ return *this;
+ }
+
+
+ operator VkImageBlit2KHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkImageBlit2KHR*>( this );
+ }
+
+ operator VkImageBlit2KHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkImageBlit2KHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ImageBlit2KHR const& ) const = default;
+#else
+ bool operator==( ImageBlit2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( srcSubresource == rhs.srcSubresource )
+ && ( srcOffsets == rhs.srcOffsets )
+ && ( dstSubresource == rhs.dstSubresource )
+ && ( dstOffsets == rhs.dstOffsets );
+ }
+
+ bool operator!=( ImageBlit2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageBlit2KHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> srcOffsets = {};
+ VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> dstOffsets = {};
+
+ };
+ static_assert( sizeof( ImageBlit2KHR ) == sizeof( VkImageBlit2KHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ImageBlit2KHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eImageBlit2KHR>
+ {
+ using Type = ImageBlit2KHR;
+ };
+
+ struct BlitImageInfo2KHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBlitImageInfo2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2KHR(VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageBlit2KHR* pRegions_ = {}, VULKAN_HPP_NAMESPACE::Filter filter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest) VULKAN_HPP_NOEXCEPT
+ : srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( regionCount_ ), pRegions( pRegions_ ), filter( filter_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2KHR( BlitImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ BlitImageInfo2KHR( VkBlitImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ BlitImageInfo2KHR( VULKAN_HPP_NAMESPACE::Image srcImage_, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, VULKAN_HPP_NAMESPACE::Image dstImage_, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageBlit2KHR> const & regions_, VULKAN_HPP_NAMESPACE::Filter filter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest )
+ : srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() ), filter( filter_ )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ BlitImageInfo2KHR & operator=( VkBlitImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BlitImageInfo2KHR const *>( &rhs );
+ return *this;
+ }
+
+ BlitImageInfo2KHR & operator=( BlitImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( BlitImageInfo2KHR ) );
+ return *this;
+ }
+
+ BlitImageInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ BlitImageInfo2KHR & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcImage = srcImage_;
+ return *this;
+ }
+
+ BlitImageInfo2KHR & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcImageLayout = srcImageLayout_;
+ return *this;
+ }
+
+ BlitImageInfo2KHR & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstImage = dstImage_;
+ return *this;
+ }
+
+ BlitImageInfo2KHR & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstImageLayout = dstImageLayout_;
+ return *this;
+ }
+
+ BlitImageInfo2KHR & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ regionCount = regionCount_;
+ return *this;
+ }
+
+ BlitImageInfo2KHR & setPRegions( const VULKAN_HPP_NAMESPACE::ImageBlit2KHR* pRegions_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pRegions = pRegions_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ BlitImageInfo2KHR & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageBlit2KHR> const & regions_ ) VULKAN_HPP_NOEXCEPT
+ {
+ regionCount = static_cast<uint32_t>( regions_.size() );
+ pRegions = regions_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+ BlitImageInfo2KHR & setFilter( VULKAN_HPP_NAMESPACE::Filter filter_ ) VULKAN_HPP_NOEXCEPT
+ {
+ filter = filter_;
+ return *this;
+ }
+
+
+ operator VkBlitImageInfo2KHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkBlitImageInfo2KHR*>( this );
+ }
+
+ operator VkBlitImageInfo2KHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkBlitImageInfo2KHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( BlitImageInfo2KHR const& ) const = default;
+#else
+ bool operator==( BlitImageInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( srcImage == rhs.srcImage )
+ && ( srcImageLayout == rhs.srcImageLayout )
+ && ( dstImage == rhs.dstImage )
+ && ( dstImageLayout == rhs.dstImageLayout )
+ && ( regionCount == rhs.regionCount )
+ && ( pRegions == rhs.pRegions )
+ && ( filter == rhs.filter );
+ }
+
+ bool operator!=( BlitImageInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBlitImageInfo2KHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Image srcImage = {};
+ VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+ VULKAN_HPP_NAMESPACE::Image dstImage = {};
+ VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+ uint32_t regionCount = {};
+ const VULKAN_HPP_NAMESPACE::ImageBlit2KHR* pRegions = {};
+ VULKAN_HPP_NAMESPACE::Filter filter = VULKAN_HPP_NAMESPACE::Filter::eNearest;
+
+ };
+ static_assert( sizeof( BlitImageInfo2KHR ) == sizeof( VkBlitImageInfo2KHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<BlitImageInfo2KHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eBlitImageInfo2KHR>
+ {
+ using Type = BlitImageInfo2KHR;
+ };
+
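// Editorial note: a hedged sketch of filling in the new VK_KHR_copy_commands2 wrappers above.
// The image handles, layouts and offsets are placeholders, and the CommandBuffer::blitImage2KHR
// wrapper is assumed to be generated elsewhere in this header.
#include <vulkan/vulkan.hpp>

void blitMip0( vk::CommandBuffer cmd, vk::Image src, vk::Image dst, vk::Extent3D extent )
{
  vk::ImageSubresourceLayers layers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 );
  vk::Offset3D               zero( 0, 0, 0 );
  vk::Offset3D               full( int32_t( extent.width ), int32_t( extent.height ), 1 );
  vk::ImageBlit2KHR          region( layers, { zero, full }, layers, { zero, full } );
  // The array-proxy constructor derives regionCount/pRegions from the single region.
  vk::BlitImageInfo2KHR blitInfo( src, vk::ImageLayout::eTransferSrcOptimal,
                                  dst, vk::ImageLayout::eTransferDstOptimal,
                                  region, vk::Filter::eLinear );
  cmd.blitImage2KHR( blitInfo );
}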
struct BufferCopy
{
- VULKAN_HPP_CONSTEXPR BufferCopy( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT
- : srcOffset( srcOffset_ )
- , dstOffset( dstOffset_ )
- , size( size_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR BufferCopy(VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}) VULKAN_HPP_NOEXCEPT
+ : srcOffset( srcOffset_ ), dstOffset( dstOffset_ ), size( size_ )
{}
+ VULKAN_HPP_CONSTEXPR BufferCopy( BufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
BufferCopy( VkBufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- BufferCopy& operator=( VkBufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT
+ BufferCopy & operator=( VkBufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCopy const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCopy const *>( &rhs );
+ return *this;
+ }
+
+ BufferCopy & operator=( BufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( BufferCopy ) );
return *this;
}
@@ -23272,6 +21880,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkBufferCopy const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBufferCopy*>( this );
@@ -23282,6 +21891,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkBufferCopy*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( BufferCopy const& ) const = default;
+#else
bool operator==( BufferCopy const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( srcOffset == rhs.srcOffset )
@@ -23293,45 +21906,155 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
VULKAN_HPP_NAMESPACE::DeviceSize srcOffset = {};
VULKAN_HPP_NAMESPACE::DeviceSize dstOffset = {};
VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+
};
static_assert( sizeof( BufferCopy ) == sizeof( VkBufferCopy ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<BufferCopy>::value, "struct wrapper is not a standard layout!" );
- struct BufferCreateInfo
+ struct BufferCopy2KHR
{
- VULKAN_HPP_CONSTEXPR BufferCreateInfo( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize size_ = {},
- VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {},
- VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive,
- uint32_t queueFamilyIndexCount_ = {},
- const uint32_t* pQueueFamilyIndices_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , size( size_ )
- , usage( usage_ )
- , sharingMode( sharingMode_ )
- , queueFamilyIndexCount( queueFamilyIndexCount_ )
- , pQueueFamilyIndices( pQueueFamilyIndices_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCopy2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR BufferCopy2KHR(VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}) VULKAN_HPP_NOEXCEPT
+ : srcOffset( srcOffset_ ), dstOffset( dstOffset_ ), size( size_ )
{}
- VULKAN_HPP_NAMESPACE::BufferCreateInfo & operator=( VULKAN_HPP_NAMESPACE::BufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR BufferCopy2KHR( BufferCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ BufferCopy2KHR( VkBufferCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BufferCreateInfo ) - offsetof( BufferCreateInfo, pNext ) );
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ BufferCopy2KHR & operator=( VkBufferCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCopy2KHR const *>( &rhs );
return *this;
}
+ BufferCopy2KHR & operator=( BufferCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( BufferCopy2KHR ) );
+ return *this;
+ }
+
+ BufferCopy2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ BufferCopy2KHR & setSrcOffset( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcOffset = srcOffset_;
+ return *this;
+ }
+
+ BufferCopy2KHR & setDstOffset( VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstOffset = dstOffset_;
+ return *this;
+ }
+
+ BufferCopy2KHR & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
+ {
+ size = size_;
+ return *this;
+ }
+
+
+ operator VkBufferCopy2KHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkBufferCopy2KHR*>( this );
+ }
+
+ operator VkBufferCopy2KHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkBufferCopy2KHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( BufferCopy2KHR const& ) const = default;
+#else
+ bool operator==( BufferCopy2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( srcOffset == rhs.srcOffset )
+ && ( dstOffset == rhs.dstOffset )
+ && ( size == rhs.size );
+ }
+
+ bool operator!=( BufferCopy2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCopy2KHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize srcOffset = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize dstOffset = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+
+ };
+ static_assert( sizeof( BufferCopy2KHR ) == sizeof( VkBufferCopy2KHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<BufferCopy2KHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eBufferCopy2KHR>
+ {
+ using Type = BufferCopy2KHR;
+ };
+
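// Editorial note: illustrative sketch. The CppType specializations added above map a
// StructureType enumerant back to its wrapper struct, and sType/pNext come pre-initialized,
// so only the payload fields need to be set.
#include <vulkan/vulkan.hpp>
#include <type_traits>

static_assert( std::is_same<vk::CppType<vk::StructureType, vk::StructureType::eBufferCopy2KHR>::Type,
                            vk::BufferCopy2KHR>::value,
               "CppType resolves eBufferCopy2KHR to the BufferCopy2KHR wrapper" );

vk::BufferCopy2KHR wholeBufferCopy( vk::DeviceSize size )
{
  // sType is already StructureType::eBufferCopy2KHR; srcOffset/dstOffset default to 0.
  return vk::BufferCopy2KHR().setSize( size );
}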
+ struct BufferCreateInfo
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR BufferCreateInfo(VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {}, VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = {}, const uint32_t* pQueueFamilyIndices_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), size( size_ ), usage( usage_ ), sharingMode( sharingMode_ ), queueFamilyIndexCount( queueFamilyIndexCount_ ), pQueueFamilyIndices( pQueueFamilyIndices_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR BufferCreateInfo( BufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
BufferCreateInfo( VkBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- BufferCreateInfo& operator=( VkBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ BufferCreateInfo( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_, VULKAN_HPP_NAMESPACE::DeviceSize size_, VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_, VULKAN_HPP_NAMESPACE::SharingMode sharingMode_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ )
+ : flags( flags_ ), size( size_ ), usage( usage_ ), sharingMode( sharingMode_ ), queueFamilyIndexCount( static_cast<uint32_t>( queueFamilyIndices_.size() ) ), pQueueFamilyIndices( queueFamilyIndices_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ BufferCreateInfo & operator=( VkBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCreateInfo const *>( &rhs );
+ return *this;
+ }
+
+ BufferCreateInfo & operator=( BufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCreateInfo const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( BufferCreateInfo ) );
return *this;
}
@@ -23377,6 +22100,16 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ BufferCreateInfo & setQueueFamilyIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
+ {
+ queueFamilyIndexCount = static_cast<uint32_t>( queueFamilyIndices_.size() );
+ pQueueFamilyIndices = queueFamilyIndices_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
operator VkBufferCreateInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBufferCreateInfo*>( this );
@@ -23387,6 +22120,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkBufferCreateInfo*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( BufferCreateInfo const& ) const = default;
+#else
bool operator==( BufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -23403,6 +22140,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCreateInfo;
@@ -23413,30 +22153,44 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
uint32_t queueFamilyIndexCount = {};
const uint32_t* pQueueFamilyIndices = {};
+
};
static_assert( sizeof( BufferCreateInfo ) == sizeof( VkBufferCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<BufferCreateInfo>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eBufferCreateInfo>
+ {
+ using Type = BufferCreateInfo;
+ };
+
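// Editorial note: a hedged sketch of the enhanced-mode BufferCreateInfo constructor added
// above; the device, size and queue family indices are hypothetical placeholders, and smart
// handles (createBufferUnique) are assumed to be enabled.
#include <vulkan/vulkan.hpp>
#include <vector>

vk::UniqueBuffer createSharedBuffer( vk::Device device, vk::DeviceSize size,
                                     std::vector<uint32_t> const & queueFamilyIndices )
{
  // queueFamilyIndexCount/pQueueFamilyIndices are derived from the vector by the
  // ArrayProxyNoTemporaries overload.
  vk::BufferCreateInfo createInfo( {}, size,
                                   vk::BufferUsageFlagBits::eStorageBuffer,
                                   vk::SharingMode::eConcurrent,
                                   queueFamilyIndices );
  return device.createBufferUnique( createInfo );
}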
struct BufferDeviceAddressCreateInfoEXT
{
- VULKAN_HPP_CONSTEXPR BufferDeviceAddressCreateInfoEXT( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) VULKAN_HPP_NOEXCEPT
- : deviceAddress( deviceAddress_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferDeviceAddressCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR BufferDeviceAddressCreateInfoEXT(VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}) VULKAN_HPP_NOEXCEPT
+ : deviceAddress( deviceAddress_ )
{}
- VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT ) - offsetof( BufferDeviceAddressCreateInfoEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR BufferDeviceAddressCreateInfoEXT( BufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BufferDeviceAddressCreateInfoEXT( VkBufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ BufferDeviceAddressCreateInfoEXT & operator=( VkBufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT const *>( &rhs );
+ return *this;
+ }
- BufferDeviceAddressCreateInfoEXT& operator=( VkBufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ BufferDeviceAddressCreateInfoEXT & operator=( BufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( BufferDeviceAddressCreateInfoEXT ) );
return *this;
}
@@ -23452,6 +22206,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkBufferDeviceAddressCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBufferDeviceAddressCreateInfoEXT*>( this );
@@ -23462,6 +22217,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkBufferDeviceAddressCreateInfoEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( BufferDeviceAddressCreateInfoEXT const& ) const = default;
+#else
bool operator==( BufferDeviceAddressCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -23473,35 +22232,52 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferDeviceAddressCreateInfoEXT;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {};
+
};
static_assert( sizeof( BufferDeviceAddressCreateInfoEXT ) == sizeof( VkBufferDeviceAddressCreateInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<BufferDeviceAddressCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eBufferDeviceAddressCreateInfoEXT>
+ {
+ using Type = BufferDeviceAddressCreateInfoEXT;
+ };
+
struct BufferDeviceAddressInfo
{
- VULKAN_HPP_CONSTEXPR BufferDeviceAddressInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) VULKAN_HPP_NOEXCEPT
- : buffer( buffer_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferDeviceAddressInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR BufferDeviceAddressInfo(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}) VULKAN_HPP_NOEXCEPT
+ : buffer( buffer_ )
{}
- VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & operator=( VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo ) - offsetof( BufferDeviceAddressInfo, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR BufferDeviceAddressInfo( BufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BufferDeviceAddressInfo( VkBufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- BufferDeviceAddressInfo& operator=( VkBufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ BufferDeviceAddressInfo & operator=( VkBufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo const *>( &rhs );
+ return *this;
+ }
+
+ BufferDeviceAddressInfo & operator=( BufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( BufferDeviceAddressInfo ) );
return *this;
}
@@ -23517,6 +22293,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkBufferDeviceAddressInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBufferDeviceAddressInfo*>( this );
@@ -23527,6 +22304,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkBufferDeviceAddressInfo*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( BufferDeviceAddressInfo const& ) const = default;
+#else
bool operator==( BufferDeviceAddressInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -23538,170 +22319,229 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferDeviceAddressInfo;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+
};
static_assert( sizeof( BufferDeviceAddressInfo ) == sizeof( VkBufferDeviceAddressInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<BufferDeviceAddressInfo>::value, "struct wrapper is not a standard layout!" );
- struct ImageSubresourceLayers
+ template <>
+ struct CppType<StructureType, StructureType::eBufferDeviceAddressInfo>
{
- VULKAN_HPP_CONSTEXPR ImageSubresourceLayers( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {},
- uint32_t mipLevel_ = {},
- uint32_t baseArrayLayer_ = {},
- uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT
- : aspectMask( aspectMask_ )
- , mipLevel( mipLevel_ )
- , baseArrayLayer( baseArrayLayer_ )
- , layerCount( layerCount_ )
+ using Type = BufferDeviceAddressInfo;
+ };
+ using BufferDeviceAddressInfoEXT = BufferDeviceAddressInfo;
+ using BufferDeviceAddressInfoKHR = BufferDeviceAddressInfo;
+
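// Editorial note: illustrative only. BufferDeviceAddressInfoKHR/EXT are now plain aliases of
// the core BufferDeviceAddressInfo, so one struct feeds both the core and the extension entry
// points; the Device::getBufferAddress wrapper is assumed to exist elsewhere in this header.
#include <vulkan/vulkan.hpp>

vk::DeviceAddress bufferAddress( vk::Device device, vk::Buffer buffer )
{
  vk::BufferDeviceAddressInfo addressInfo( buffer );  // sType/pNext are pre-filled
  return device.getBufferAddress( addressInfo );
}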
+ struct BufferImageCopy
+ {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR BufferImageCopy(VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ = {}, uint32_t bufferRowLength_ = {}, uint32_t bufferImageHeight_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {}) VULKAN_HPP_NOEXCEPT
+ : bufferOffset( bufferOffset_ ), bufferRowLength( bufferRowLength_ ), bufferImageHeight( bufferImageHeight_ ), imageSubresource( imageSubresource_ ), imageOffset( imageOffset_ ), imageExtent( imageExtent_ )
{}
- ImageSubresourceLayers( VkImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR BufferImageCopy( BufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ BufferImageCopy( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ImageSubresourceLayers& operator=( VkImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT
+ BufferImageCopy & operator=( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferImageCopy const *>( &rhs );
return *this;
}
- ImageSubresourceLayers & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
+ BufferImageCopy & operator=( BufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
{
- aspectMask = aspectMask_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( BufferImageCopy ) );
return *this;
}
- ImageSubresourceLayers & setMipLevel( uint32_t mipLevel_ ) VULKAN_HPP_NOEXCEPT
+ BufferImageCopy & setBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ ) VULKAN_HPP_NOEXCEPT
{
- mipLevel = mipLevel_;
+ bufferOffset = bufferOffset_;
return *this;
}
- ImageSubresourceLayers & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT
+ BufferImageCopy & setBufferRowLength( uint32_t bufferRowLength_ ) VULKAN_HPP_NOEXCEPT
{
- baseArrayLayer = baseArrayLayer_;
+ bufferRowLength = bufferRowLength_;
return *this;
}
- ImageSubresourceLayers & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
+ BufferImageCopy & setBufferImageHeight( uint32_t bufferImageHeight_ ) VULKAN_HPP_NOEXCEPT
{
- layerCount = layerCount_;
+ bufferImageHeight = bufferImageHeight_;
return *this;
}
- operator VkImageSubresourceLayers const&() const VULKAN_HPP_NOEXCEPT
+ BufferImageCopy & setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkImageSubresourceLayers*>( this );
+ imageSubresource = imageSubresource_;
+ return *this;
}
- operator VkImageSubresourceLayers &() VULKAN_HPP_NOEXCEPT
+ BufferImageCopy & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkImageSubresourceLayers*>( this );
+ imageOffset = imageOffset_;
+ return *this;
}
- bool operator==( ImageSubresourceLayers const& rhs ) const VULKAN_HPP_NOEXCEPT
+ BufferImageCopy & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
{
- return ( aspectMask == rhs.aspectMask )
- && ( mipLevel == rhs.mipLevel )
- && ( baseArrayLayer == rhs.baseArrayLayer )
- && ( layerCount == rhs.layerCount );
+ imageExtent = imageExtent_;
+ return *this;
}
- bool operator!=( ImageSubresourceLayers const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+ operator VkBufferImageCopy const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkBufferImageCopy*>( this );
+ }
+
+ operator VkBufferImageCopy &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkBufferImageCopy*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( BufferImageCopy const& ) const = default;
+#else
+ bool operator==( BufferImageCopy const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( bufferOffset == rhs.bufferOffset )
+ && ( bufferRowLength == rhs.bufferRowLength )
+ && ( bufferImageHeight == rhs.bufferImageHeight )
+ && ( imageSubresource == rhs.imageSubresource )
+ && ( imageOffset == rhs.imageOffset )
+ && ( imageExtent == rhs.imageExtent );
+ }
+
+ bool operator!=( BufferImageCopy const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
- uint32_t mipLevel = {};
- uint32_t baseArrayLayer = {};
- uint32_t layerCount = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset = {};
+ uint32_t bufferRowLength = {};
+ uint32_t bufferImageHeight = {};
+ VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {};
+ VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {};
+ VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {};
+
};
- static_assert( sizeof( ImageSubresourceLayers ) == sizeof( VkImageSubresourceLayers ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ImageSubresourceLayers>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( BufferImageCopy ) == sizeof( VkBufferImageCopy ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<BufferImageCopy>::value, "struct wrapper is not a standard layout!" );
- struct BufferImageCopy
+ struct BufferImageCopy2KHR
{
- VULKAN_HPP_CONSTEXPR BufferImageCopy( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ = {},
- uint32_t bufferRowLength_ = {},
- uint32_t bufferImageHeight_ = {},
- VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {},
- VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {},
- VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {} ) VULKAN_HPP_NOEXCEPT
- : bufferOffset( bufferOffset_ )
- , bufferRowLength( bufferRowLength_ )
- , bufferImageHeight( bufferImageHeight_ )
- , imageSubresource( imageSubresource_ )
- , imageOffset( imageOffset_ )
- , imageExtent( imageExtent_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferImageCopy2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR BufferImageCopy2KHR(VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ = {}, uint32_t bufferRowLength_ = {}, uint32_t bufferImageHeight_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {}) VULKAN_HPP_NOEXCEPT
+ : bufferOffset( bufferOffset_ ), bufferRowLength( bufferRowLength_ ), bufferImageHeight( bufferImageHeight_ ), imageSubresource( imageSubresource_ ), imageOffset( imageOffset_ ), imageExtent( imageExtent_ )
{}
- BufferImageCopy( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR BufferImageCopy2KHR( BufferImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ BufferImageCopy2KHR( VkBufferImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- BufferImageCopy& operator=( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
+ BufferImageCopy2KHR & operator=( VkBufferImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferImageCopy const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR const *>( &rhs );
return *this;
}
- BufferImageCopy & setBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ ) VULKAN_HPP_NOEXCEPT
+ BufferImageCopy2KHR & operator=( BufferImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( BufferImageCopy2KHR ) );
+ return *this;
+ }
+
+ BufferImageCopy2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ BufferImageCopy2KHR & setBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ ) VULKAN_HPP_NOEXCEPT
{
bufferOffset = bufferOffset_;
return *this;
}
- BufferImageCopy & setBufferRowLength( uint32_t bufferRowLength_ ) VULKAN_HPP_NOEXCEPT
+ BufferImageCopy2KHR & setBufferRowLength( uint32_t bufferRowLength_ ) VULKAN_HPP_NOEXCEPT
{
bufferRowLength = bufferRowLength_;
return *this;
}
- BufferImageCopy & setBufferImageHeight( uint32_t bufferImageHeight_ ) VULKAN_HPP_NOEXCEPT
+ BufferImageCopy2KHR & setBufferImageHeight( uint32_t bufferImageHeight_ ) VULKAN_HPP_NOEXCEPT
{
bufferImageHeight = bufferImageHeight_;
return *this;
}
- BufferImageCopy & setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ ) VULKAN_HPP_NOEXCEPT
+ BufferImageCopy2KHR & setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT
{
imageSubresource = imageSubresource_;
return *this;
}
- BufferImageCopy & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ ) VULKAN_HPP_NOEXCEPT
+ BufferImageCopy2KHR & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT
{
imageOffset = imageOffset_;
return *this;
}
- BufferImageCopy & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ ) VULKAN_HPP_NOEXCEPT
+ BufferImageCopy2KHR & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
{
imageExtent = imageExtent_;
return *this;
}
- operator VkBufferImageCopy const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkBufferImageCopy2KHR const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkBufferImageCopy*>( this );
+ return *reinterpret_cast<const VkBufferImageCopy2KHR*>( this );
}
- operator VkBufferImageCopy &() VULKAN_HPP_NOEXCEPT
+ operator VkBufferImageCopy2KHR &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkBufferImageCopy*>( this );
+ return *reinterpret_cast<VkBufferImageCopy2KHR*>( this );
}
- bool operator==( BufferImageCopy const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( BufferImageCopy2KHR const& ) const = default;
+#else
+ bool operator==( BufferImageCopy2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- return ( bufferOffset == rhs.bufferOffset )
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( bufferOffset == rhs.bufferOffset )
&& ( bufferRowLength == rhs.bufferRowLength )
&& ( bufferImageHeight == rhs.bufferImageHeight )
&& ( imageSubresource == rhs.imageSubresource )
@@ -23709,54 +22549,61 @@ namespace VULKAN_HPP_NAMESPACE
&& ( imageExtent == rhs.imageExtent );
}
- bool operator!=( BufferImageCopy const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( BufferImageCopy2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferImageCopy2KHR;
+ const void* pNext = {};
VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset = {};
uint32_t bufferRowLength = {};
uint32_t bufferImageHeight = {};
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {};
VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {};
VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {};
+
+ };
+ static_assert( sizeof( BufferImageCopy2KHR ) == sizeof( VkBufferImageCopy2KHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<BufferImageCopy2KHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eBufferImageCopy2KHR>
+ {
+ using Type = BufferImageCopy2KHR;
};
- static_assert( sizeof( BufferImageCopy ) == sizeof( VkBufferImageCopy ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<BufferImageCopy>::value, "struct wrapper is not a standard layout!" );
struct BufferMemoryBarrier
{
- VULKAN_HPP_CONSTEXPR BufferMemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {},
- VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {},
- uint32_t srcQueueFamilyIndex_ = {},
- uint32_t dstQueueFamilyIndex_ = {},
- VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT
- : srcAccessMask( srcAccessMask_ )
- , dstAccessMask( dstAccessMask_ )
- , srcQueueFamilyIndex( srcQueueFamilyIndex_ )
- , dstQueueFamilyIndex( dstQueueFamilyIndex_ )
- , buffer( buffer_ )
- , offset( offset_ )
- , size( size_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryBarrier;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR BufferMemoryBarrier(VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, uint32_t srcQueueFamilyIndex_ = {}, uint32_t dstQueueFamilyIndex_ = {}, VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}) VULKAN_HPP_NOEXCEPT
+ : srcAccessMask( srcAccessMask_ ), dstAccessMask( dstAccessMask_ ), srcQueueFamilyIndex( srcQueueFamilyIndex_ ), dstQueueFamilyIndex( dstQueueFamilyIndex_ ), buffer( buffer_ ), offset( offset_ ), size( size_ )
{}
- VULKAN_HPP_NAMESPACE::BufferMemoryBarrier & operator=( VULKAN_HPP_NAMESPACE::BufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BufferMemoryBarrier ) - offsetof( BufferMemoryBarrier, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR BufferMemoryBarrier( BufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BufferMemoryBarrier( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ BufferMemoryBarrier & operator=( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier const *>( &rhs );
+ return *this;
+ }
- BufferMemoryBarrier& operator=( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
+ BufferMemoryBarrier & operator=( BufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( BufferMemoryBarrier ) );
return *this;
}
@@ -23808,6 +22655,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkBufferMemoryBarrier const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBufferMemoryBarrier*>( this );
@@ -23818,6 +22666,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkBufferMemoryBarrier*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( BufferMemoryBarrier const& ) const = default;
+#else
bool operator==( BufferMemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -23835,6 +22687,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryBarrier;
@@ -23846,30 +22701,44 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Buffer buffer = {};
VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+
};
static_assert( sizeof( BufferMemoryBarrier ) == sizeof( VkBufferMemoryBarrier ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<BufferMemoryBarrier>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eBufferMemoryBarrier>
+ {
+ using Type = BufferMemoryBarrier;
+ };
+
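// Editorial note: a hedged sketch of a queue-family release barrier built with the wrapper
// above; the stage masks and queue family indices are placeholders, and the
// CommandBuffer::pipelineBarrier wrapper is assumed to be generated elsewhere in this header.
#include <vulkan/vulkan.hpp>

void releaseBuffer( vk::CommandBuffer cmd, vk::Buffer buffer,
                    uint32_t srcQueueFamily, uint32_t dstQueueFamily )
{
  vk::BufferMemoryBarrier barrier( vk::AccessFlagBits::eShaderWrite, {},
                                   srcQueueFamily, dstQueueFamily,
                                   buffer, 0, VK_WHOLE_SIZE );
  cmd.pipelineBarrier( vk::PipelineStageFlagBits::eComputeShader,
                       vk::PipelineStageFlagBits::eBottomOfPipe,
                       {}, {}, barrier, {} );
}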
struct BufferMemoryRequirementsInfo2
{
- VULKAN_HPP_CONSTEXPR BufferMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) VULKAN_HPP_NOEXCEPT
- : buffer( buffer_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryRequirementsInfo2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR BufferMemoryRequirementsInfo2(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}) VULKAN_HPP_NOEXCEPT
+ : buffer( buffer_ )
{}
- VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & operator=( VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 ) - offsetof( BufferMemoryRequirementsInfo2, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR BufferMemoryRequirementsInfo2( BufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BufferMemoryRequirementsInfo2( VkBufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- BufferMemoryRequirementsInfo2& operator=( VkBufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ BufferMemoryRequirementsInfo2 & operator=( VkBufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 const *>( &rhs );
+ return *this;
+ }
+
+ BufferMemoryRequirementsInfo2 & operator=( BufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( BufferMemoryRequirementsInfo2 ) );
return *this;
}
@@ -23885,6 +22754,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkBufferMemoryRequirementsInfo2 const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBufferMemoryRequirementsInfo2*>( this );
@@ -23895,6 +22765,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkBufferMemoryRequirementsInfo2*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( BufferMemoryRequirementsInfo2 const& ) const = default;
+#else
bool operator==( BufferMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -23906,35 +22780,53 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryRequirementsInfo2;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+
};
static_assert( sizeof( BufferMemoryRequirementsInfo2 ) == sizeof( VkBufferMemoryRequirementsInfo2 ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<BufferMemoryRequirementsInfo2>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eBufferMemoryRequirementsInfo2>
+ {
+ using Type = BufferMemoryRequirementsInfo2;
+ };
+ using BufferMemoryRequirementsInfo2KHR = BufferMemoryRequirementsInfo2;
+
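// Illustrative sketch, not part of the diff: the new CppType specializations map an sType
// enumerant back to its wrapper struct at compile time, and the defaulted operator<=>
// (or the generated operator== fallback) compares wrappers memberwise. Assumes
// <vulkan/vulkan.hpp> with the default vk namespace and struct constructors enabled
// (VULKAN_HPP_NO_STRUCT_CONSTRUCTORS not defined).
#include <type_traits>
#include <vulkan/vulkan.hpp>

static_assert( std::is_same<vk::CppType<vk::StructureType,
                                        vk::StructureType::eBufferMemoryRequirementsInfo2>::Type,
                            vk::BufferMemoryRequirementsInfo2>::value,
               "CppType resolves eBufferMemoryRequirementsInfo2 to its wrapper struct" );

inline bool sameBufferQuery( vk::Buffer buffer )
{
  vk::BufferMemoryRequirementsInfo2 a( buffer );
  vk::BufferMemoryRequirementsInfo2 b( buffer );
  return a == b;   // memberwise comparison: sType, pNext and buffer all match, so this is true
}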
struct BufferOpaqueCaptureAddressCreateInfo
{
- VULKAN_HPP_CONSTEXPR BufferOpaqueCaptureAddressCreateInfo( uint64_t opaqueCaptureAddress_ = {} ) VULKAN_HPP_NOEXCEPT
- : opaqueCaptureAddress( opaqueCaptureAddress_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferOpaqueCaptureAddressCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR BufferOpaqueCaptureAddressCreateInfo(uint64_t opaqueCaptureAddress_ = {}) VULKAN_HPP_NOEXCEPT
+ : opaqueCaptureAddress( opaqueCaptureAddress_ )
{}
- VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo & operator=( VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo ) - offsetof( BufferOpaqueCaptureAddressCreateInfo, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR BufferOpaqueCaptureAddressCreateInfo( BufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BufferOpaqueCaptureAddressCreateInfo( VkBufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- BufferOpaqueCaptureAddressCreateInfo& operator=( VkBufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ BufferOpaqueCaptureAddressCreateInfo & operator=( VkBufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo const *>( &rhs );
+ return *this;
+ }
+
+ BufferOpaqueCaptureAddressCreateInfo & operator=( BufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( BufferOpaqueCaptureAddressCreateInfo ) );
return *this;
}
@@ -23950,6 +22842,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkBufferOpaqueCaptureAddressCreateInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBufferOpaqueCaptureAddressCreateInfo*>( this );
@@ -23960,6 +22853,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkBufferOpaqueCaptureAddressCreateInfo*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( BufferOpaqueCaptureAddressCreateInfo const& ) const = default;
+#else
bool operator==( BufferOpaqueCaptureAddressCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -23971,43 +22868,53 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferOpaqueCaptureAddressCreateInfo;
const void* pNext = {};
uint64_t opaqueCaptureAddress = {};
+
};
static_assert( sizeof( BufferOpaqueCaptureAddressCreateInfo ) == sizeof( VkBufferOpaqueCaptureAddressCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<BufferOpaqueCaptureAddressCreateInfo>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eBufferOpaqueCaptureAddressCreateInfo>
+ {
+ using Type = BufferOpaqueCaptureAddressCreateInfo;
+ };
+ using BufferOpaqueCaptureAddressCreateInfoKHR = BufferOpaqueCaptureAddressCreateInfo;
+
struct BufferViewCreateInfo
{
- VULKAN_HPP_CONSTEXPR BufferViewCreateInfo( VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags_ = {},
- VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
- VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
- VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize range_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , buffer( buffer_ )
- , format( format_ )
- , offset( offset_ )
- , range( range_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferViewCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR BufferViewCreateInfo(VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize range_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), buffer( buffer_ ), format( format_ ), offset( offset_ ), range( range_ )
{}
- VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & operator=( VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BufferViewCreateInfo ) - offsetof( BufferViewCreateInfo, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR BufferViewCreateInfo( BufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BufferViewCreateInfo( VkBufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- BufferViewCreateInfo& operator=( VkBufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ BufferViewCreateInfo & operator=( VkBufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const *>( &rhs );
+ return *this;
+ }
+
+ BufferViewCreateInfo & operator=( BufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( BufferViewCreateInfo ) );
return *this;
}
@@ -24047,6 +22954,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkBufferViewCreateInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBufferViewCreateInfo*>( this );
@@ -24057,6 +22965,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkBufferViewCreateInfo*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( BufferViewCreateInfo const& ) const = default;
+#else
bool operator==( BufferViewCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -24072,6 +22984,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferViewCreateInfo;
@@ -24081,30 +22996,44 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
VULKAN_HPP_NAMESPACE::DeviceSize range = {};
+
};
static_assert( sizeof( BufferViewCreateInfo ) == sizeof( VkBufferViewCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<BufferViewCreateInfo>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eBufferViewCreateInfo>
+ {
+ using Type = BufferViewCreateInfo;
+ };
+
struct CalibratedTimestampInfoEXT
{
- VULKAN_HPP_CONSTEXPR CalibratedTimestampInfoEXT( VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain_ = VULKAN_HPP_NAMESPACE::TimeDomainEXT::eDevice ) VULKAN_HPP_NOEXCEPT
- : timeDomain( timeDomain_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCalibratedTimestampInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR CalibratedTimestampInfoEXT(VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain_ = VULKAN_HPP_NAMESPACE::TimeDomainEXT::eDevice) VULKAN_HPP_NOEXCEPT
+ : timeDomain( timeDomain_ )
{}
- VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT & operator=( VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT ) - offsetof( CalibratedTimestampInfoEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR CalibratedTimestampInfoEXT( CalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CalibratedTimestampInfoEXT( VkCalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ CalibratedTimestampInfoEXT & operator=( VkCalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT const *>( &rhs );
+ return *this;
+ }
- CalibratedTimestampInfoEXT& operator=( VkCalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ CalibratedTimestampInfoEXT & operator=( CalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( CalibratedTimestampInfoEXT ) );
return *this;
}
@@ -24120,6 +23049,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkCalibratedTimestampInfoEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCalibratedTimestampInfoEXT*>( this );
@@ -24130,6 +23060,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkCalibratedTimestampInfoEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( CalibratedTimestampInfoEXT const& ) const = default;
+#else
bool operator==( CalibratedTimestampInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -24141,40 +23075,56 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCalibratedTimestampInfoEXT;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain = VULKAN_HPP_NAMESPACE::TimeDomainEXT::eDevice;
+
};
static_assert( sizeof( CalibratedTimestampInfoEXT ) == sizeof( VkCalibratedTimestampInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<CalibratedTimestampInfoEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eCalibratedTimestampInfoEXT>
+ {
+ using Type = CalibratedTimestampInfoEXT;
+ };
+
struct CheckpointDataNV
{
- CheckpointDataNV( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eTopOfPipe,
- void* pCheckpointMarker_ = {} ) VULKAN_HPP_NOEXCEPT
- : stage( stage_ )
- , pCheckpointMarker( pCheckpointMarker_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCheckpointDataNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR CheckpointDataNV(VULKAN_HPP_NAMESPACE::PipelineStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eTopOfPipe, void* pCheckpointMarker_ = {}) VULKAN_HPP_NOEXCEPT
+ : stage( stage_ ), pCheckpointMarker( pCheckpointMarker_ )
{}
- VULKAN_HPP_NAMESPACE::CheckpointDataNV & operator=( VULKAN_HPP_NAMESPACE::CheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::CheckpointDataNV ) - offsetof( CheckpointDataNV, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR CheckpointDataNV( CheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CheckpointDataNV( VkCheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- CheckpointDataNV& operator=( VkCheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ CheckpointDataNV & operator=( VkCheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CheckpointDataNV const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CheckpointDataNV const *>( &rhs );
return *this;
}
+ CheckpointDataNV & operator=( CheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( CheckpointDataNV ) );
+ return *this;
+ }
+
+
operator VkCheckpointDataNV const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCheckpointDataNV*>( this );
@@ -24185,6 +23135,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkCheckpointDataNV*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( CheckpointDataNV const& ) const = default;
+#else
bool operator==( CheckpointDataNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -24197,54 +23151,66 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCheckpointDataNV;
void* pNext = {};
VULKAN_HPP_NAMESPACE::PipelineStageFlagBits stage = VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eTopOfPipe;
void* pCheckpointMarker = {};
+
};
static_assert( sizeof( CheckpointDataNV ) == sizeof( VkCheckpointDataNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<CheckpointDataNV>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eCheckpointDataNV>
+ {
+ using Type = CheckpointDataNV;
+ };
+
union ClearColorValue
{
- ClearColorValue( const std::array<float,4>& float32_ = {} )
+ ClearColorValue( VULKAN_HPP_NAMESPACE::ClearColorValue const& rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( float32, float32_.data(), 4 * sizeof( float ) );
+ memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::ClearColorValue ) );
}
+ ClearColorValue( const std::array<float,4>& float32_ = {} )
+ : float32( float32_ )
+ {}
+
ClearColorValue( const std::array<int32_t,4>& int32_ )
- {
- memcpy( int32, int32_.data(), 4 * sizeof( int32_t ) );
- }
+ : int32( int32_ )
+ {}
ClearColorValue( const std::array<uint32_t,4>& uint32_ )
- {
- memcpy( uint32, uint32_.data(), 4 * sizeof( uint32_t ) );
- }
+ : uint32( uint32_ )
+ {}
ClearColorValue & setFloat32( std::array<float,4> float32_ ) VULKAN_HPP_NOEXCEPT
{
- memcpy( float32, float32_.data(), 4 * sizeof( float ) );
+ float32 = float32_;
return *this;
}
ClearColorValue & setInt32( std::array<int32_t,4> int32_ ) VULKAN_HPP_NOEXCEPT
{
- memcpy( int32, int32_.data(), 4 * sizeof( int32_t ) );
+ int32 = int32_;
return *this;
}
ClearColorValue & setUint32( std::array<uint32_t,4> uint32_ ) VULKAN_HPP_NOEXCEPT
{
- memcpy( uint32, uint32_.data(), 4 * sizeof( uint32_t ) );
+ uint32 = uint32_;
return *this;
}
VULKAN_HPP_NAMESPACE::ClearColorValue & operator=( VULKAN_HPP_NAMESPACE::ClearColorValue const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( this, &rhs, sizeof( VULKAN_HPP_NAMESPACE::ClearColorValue ) );
+ memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::ClearColorValue ) );
return *this;
}
@@ -24258,27 +23224,37 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkClearColorValue*>(this);
}
- float float32[4];
- int32_t int32[4];
- uint32_t uint32[4];
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> float32;
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<int32_t, 4> int32;
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 4> uint32;
};
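// Illustrative sketch, not part of the diff: ClearColorValue now stores ArrayWrapper1D
// members instead of raw C arrays, so the constructors and setters shown above assign
// directly from std::array rather than calling memcpy. Assumes <vulkan/vulkan.hpp>
// with the default vk namespace.
#include <array>
#include <vulkan/vulkan.hpp>

inline vk::ClearColorValue makeOpaqueBlue()
{
  std::array<float, 4> rgba{ 0.0f, 0.0f, 1.0f, 1.0f };
  vk::ClearColorValue color( rgba );   // constructs the float32 view from the std::array
  color.setFloat32( rgba );            // the setter overwrites the float view the same way
  return color;
}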
struct ClearDepthStencilValue
{
- VULKAN_HPP_CONSTEXPR ClearDepthStencilValue( float depth_ = {},
- uint32_t stencil_ = {} ) VULKAN_HPP_NOEXCEPT
- : depth( depth_ )
- , stencil( stencil_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ClearDepthStencilValue(float depth_ = {}, uint32_t stencil_ = {}) VULKAN_HPP_NOEXCEPT
+ : depth( depth_ ), stencil( stencil_ )
{}
+ VULKAN_HPP_CONSTEXPR ClearDepthStencilValue( ClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
ClearDepthStencilValue( VkClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ClearDepthStencilValue& operator=( VkClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT
+ ClearDepthStencilValue & operator=( VkClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClearDepthStencilValue const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClearDepthStencilValue const *>( &rhs );
+ return *this;
+ }
+
+ ClearDepthStencilValue & operator=( ClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ClearDepthStencilValue ) );
return *this;
}
@@ -24294,6 +23270,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkClearDepthStencilValue const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkClearDepthStencilValue*>( this );
@@ -24304,6 +23281,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkClearDepthStencilValue*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ClearDepthStencilValue const& ) const = default;
+#else
bool operator==( ClearDepthStencilValue const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( depth == rhs.depth )
@@ -24314,33 +23295,40 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
float depth = {};
uint32_t stencil = {};
+
};
static_assert( sizeof( ClearDepthStencilValue ) == sizeof( VkClearDepthStencilValue ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ClearDepthStencilValue>::value, "struct wrapper is not a standard layout!" );
union ClearValue
{
- ClearValue( VULKAN_HPP_NAMESPACE::ClearColorValue color_ = {} )
+ ClearValue( VULKAN_HPP_NAMESPACE::ClearValue const& rhs ) VULKAN_HPP_NOEXCEPT
{
- color = color_;
+ memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::ClearValue ) );
}
+ ClearValue( VULKAN_HPP_NAMESPACE::ClearColorValue color_ = {} )
+ : color( color_ )
+ {}
+
ClearValue( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue depthStencil_ )
- {
- depthStencil = depthStencil_;
- }
+ : depthStencil( depthStencil_ )
+ {}
- ClearValue & setColor( VULKAN_HPP_NAMESPACE::ClearColorValue color_ ) VULKAN_HPP_NOEXCEPT
+ ClearValue & setColor( VULKAN_HPP_NAMESPACE::ClearColorValue const & color_ ) VULKAN_HPP_NOEXCEPT
{
color = color_;
return *this;
}
- ClearValue & setDepthStencil( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue depthStencil_ ) VULKAN_HPP_NOEXCEPT
+ ClearValue & setDepthStencil( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue const & depthStencil_ ) VULKAN_HPP_NOEXCEPT
{
depthStencil = depthStencil_;
return *this;
@@ -24348,7 +23336,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ClearValue & operator=( VULKAN_HPP_NAMESPACE::ClearValue const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( this, &rhs, sizeof( VULKAN_HPP_NAMESPACE::ClearValue ) );
+ memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::ClearValue ) );
return *this;
}
@@ -24373,22 +23361,30 @@ namespace VULKAN_HPP_NAMESPACE
struct ClearAttachment
{
- ClearAttachment( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {},
- uint32_t colorAttachment_ = {},
- VULKAN_HPP_NAMESPACE::ClearValue clearValue_ = {} ) VULKAN_HPP_NOEXCEPT
- : aspectMask( aspectMask_ )
- , colorAttachment( colorAttachment_ )
- , clearValue( clearValue_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ ClearAttachment(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, uint32_t colorAttachment_ = {}, VULKAN_HPP_NAMESPACE::ClearValue clearValue_ = {}) VULKAN_HPP_NOEXCEPT
+ : aspectMask( aspectMask_ ), colorAttachment( colorAttachment_ ), clearValue( clearValue_ )
{}
+ ClearAttachment( ClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
ClearAttachment( VkClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ClearAttachment& operator=( VkClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT
+ ClearAttachment & operator=( VkClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClearAttachment const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClearAttachment const *>( &rhs );
+ return *this;
+ }
+
+ ClearAttachment & operator=( ClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ClearAttachment ) );
return *this;
}
@@ -24404,12 +23400,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- ClearAttachment & setClearValue( VULKAN_HPP_NAMESPACE::ClearValue clearValue_ ) VULKAN_HPP_NOEXCEPT
+ ClearAttachment & setClearValue( VULKAN_HPP_NAMESPACE::ClearValue const & clearValue_ ) VULKAN_HPP_NOEXCEPT
{
clearValue = clearValue_;
return *this;
}
+
operator VkClearAttachment const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkClearAttachment*>( this );
@@ -24420,36 +23417,48 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkClearAttachment*>( this );
}
+
+
+
public:
VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
uint32_t colorAttachment = {};
VULKAN_HPP_NAMESPACE::ClearValue clearValue = {};
+
};
static_assert( sizeof( ClearAttachment ) == sizeof( VkClearAttachment ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ClearAttachment>::value, "struct wrapper is not a standard layout!" );
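// Illustrative sketch, not part of the diff: setClearValue (shown above) now takes its
// argument by const reference, and ClearValue gains member-initializing constructors, so a
// color clear for attachment 0 can be described as below. setAspectMask and setColorAttachment
// are the usual generated setters and are assumed here, not shown in this hunk. Assumes
// <vulkan/vulkan.hpp> with struct constructors enabled.
#include <array>
#include <vulkan/vulkan.hpp>

inline vk::ClearAttachment makeColorClear()
{
  std::array<float, 4> black{ 0.0f, 0.0f, 0.0f, 1.0f };
  vk::ClearValue clearValue( vk::ClearColorValue( black ) );
  return vk::ClearAttachment()
      .setAspectMask( vk::ImageAspectFlagBits::eColor )
      .setColorAttachment( 0 )
      .setClearValue( clearValue );
}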
struct ClearRect
{
- VULKAN_HPP_CONSTEXPR ClearRect( VULKAN_HPP_NAMESPACE::Rect2D rect_ = {},
- uint32_t baseArrayLayer_ = {},
- uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT
- : rect( rect_ )
- , baseArrayLayer( baseArrayLayer_ )
- , layerCount( layerCount_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ClearRect(VULKAN_HPP_NAMESPACE::Rect2D rect_ = {}, uint32_t baseArrayLayer_ = {}, uint32_t layerCount_ = {}) VULKAN_HPP_NOEXCEPT
+ : rect( rect_ ), baseArrayLayer( baseArrayLayer_ ), layerCount( layerCount_ )
{}
+ VULKAN_HPP_CONSTEXPR ClearRect( ClearRect const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
ClearRect( VkClearRect const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ ClearRect & operator=( VkClearRect const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClearRect const *>( &rhs );
+ return *this;
+ }
- ClearRect& operator=( VkClearRect const & rhs ) VULKAN_HPP_NOEXCEPT
+ ClearRect & operator=( ClearRect const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClearRect const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ClearRect ) );
return *this;
}
- ClearRect & setRect( VULKAN_HPP_NAMESPACE::Rect2D rect_ ) VULKAN_HPP_NOEXCEPT
+ ClearRect & setRect( VULKAN_HPP_NAMESPACE::Rect2D const & rect_ ) VULKAN_HPP_NOEXCEPT
{
rect = rect_;
return *this;
@@ -24467,6 +23476,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkClearRect const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkClearRect*>( this );
@@ -24477,6 +23487,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkClearRect*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ClearRect const& ) const = default;
+#else
bool operator==( ClearRect const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( rect == rhs.rect )
@@ -24488,587 +23502,644 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
VULKAN_HPP_NAMESPACE::Rect2D rect = {};
uint32_t baseArrayLayer = {};
uint32_t layerCount = {};
+
};
static_assert( sizeof( ClearRect ) == sizeof( VkClearRect ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ClearRect>::value, "struct wrapper is not a standard layout!" );
- struct IndirectCommandsTokenNVX
+ struct CoarseSampleLocationNV
{
- VULKAN_HPP_CONSTEXPR IndirectCommandsTokenNVX( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX tokenType_ = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX::ePipeline,
- VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {} ) VULKAN_HPP_NOEXCEPT
- : tokenType( tokenType_ )
- , buffer( buffer_ )
- , offset( offset_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR CoarseSampleLocationNV(uint32_t pixelX_ = {}, uint32_t pixelY_ = {}, uint32_t sample_ = {}) VULKAN_HPP_NOEXCEPT
+ : pixelX( pixelX_ ), pixelY( pixelY_ ), sample( sample_ )
{}
- IndirectCommandsTokenNVX( VkIndirectCommandsTokenNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR CoarseSampleLocationNV( CoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ CoarseSampleLocationNV( VkCoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- IndirectCommandsTokenNVX& operator=( VkIndirectCommandsTokenNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+ CoarseSampleLocationNV & operator=( VkCoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectCommandsTokenNVX const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV const *>( &rhs );
return *this;
}
- IndirectCommandsTokenNVX & setTokenType( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX tokenType_ ) VULKAN_HPP_NOEXCEPT
+ CoarseSampleLocationNV & operator=( CoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- tokenType = tokenType_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( CoarseSampleLocationNV ) );
return *this;
}
- IndirectCommandsTokenNVX & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+ CoarseSampleLocationNV & setPixelX( uint32_t pixelX_ ) VULKAN_HPP_NOEXCEPT
{
- buffer = buffer_;
+ pixelX = pixelX_;
return *this;
}
- IndirectCommandsTokenNVX & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+ CoarseSampleLocationNV & setPixelY( uint32_t pixelY_ ) VULKAN_HPP_NOEXCEPT
{
- offset = offset_;
+ pixelY = pixelY_;
return *this;
}
- operator VkIndirectCommandsTokenNVX const&() const VULKAN_HPP_NOEXCEPT
+ CoarseSampleLocationNV & setSample( uint32_t sample_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkIndirectCommandsTokenNVX*>( this );
+ sample = sample_;
+ return *this;
}
- operator VkIndirectCommandsTokenNVX &() VULKAN_HPP_NOEXCEPT
+
+ operator VkCoarseSampleLocationNV const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkIndirectCommandsTokenNVX*>( this );
+ return *reinterpret_cast<const VkCoarseSampleLocationNV*>( this );
}
- bool operator==( IndirectCommandsTokenNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+ operator VkCoarseSampleLocationNV &() VULKAN_HPP_NOEXCEPT
{
- return ( tokenType == rhs.tokenType )
- && ( buffer == rhs.buffer )
- && ( offset == rhs.offset );
+ return *reinterpret_cast<VkCoarseSampleLocationNV*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( CoarseSampleLocationNV const& ) const = default;
+#else
+ bool operator==( CoarseSampleLocationNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( pixelX == rhs.pixelX )
+ && ( pixelY == rhs.pixelY )
+ && ( sample == rhs.sample );
}
- bool operator!=( IndirectCommandsTokenNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( CoarseSampleLocationNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX tokenType = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX::ePipeline;
- VULKAN_HPP_NAMESPACE::Buffer buffer = {};
- VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+ uint32_t pixelX = {};
+ uint32_t pixelY = {};
+ uint32_t sample = {};
+
};
- static_assert( sizeof( IndirectCommandsTokenNVX ) == sizeof( VkIndirectCommandsTokenNVX ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<IndirectCommandsTokenNVX>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( CoarseSampleLocationNV ) == sizeof( VkCoarseSampleLocationNV ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<CoarseSampleLocationNV>::value, "struct wrapper is not a standard layout!" );
- struct CmdProcessCommandsInfoNVX
+ struct CoarseSampleOrderCustomNV
{
- VULKAN_HPP_CONSTEXPR CmdProcessCommandsInfoNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable_ = {},
- VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout_ = {},
- uint32_t indirectCommandsTokenCount_ = {},
- const VULKAN_HPP_NAMESPACE::IndirectCommandsTokenNVX* pIndirectCommandsTokens_ = {},
- uint32_t maxSequencesCount_ = {},
- VULKAN_HPP_NAMESPACE::CommandBuffer targetCommandBuffer_ = {},
- VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = {},
- VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {} ) VULKAN_HPP_NOEXCEPT
- : objectTable( objectTable_ )
- , indirectCommandsLayout( indirectCommandsLayout_ )
- , indirectCommandsTokenCount( indirectCommandsTokenCount_ )
- , pIndirectCommandsTokens( pIndirectCommandsTokens_ )
- , maxSequencesCount( maxSequencesCount_ )
- , targetCommandBuffer( targetCommandBuffer_ )
- , sequencesCountBuffer( sequencesCountBuffer_ )
- , sequencesCountOffset( sequencesCountOffset_ )
- , sequencesIndexBuffer( sequencesIndexBuffer_ )
- , sequencesIndexOffset( sequencesIndexOffset_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR CoarseSampleOrderCustomNV(VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_ = VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::eNoInvocations, uint32_t sampleCount_ = {}, uint32_t sampleLocationCount_ = {}, const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV* pSampleLocations_ = {}) VULKAN_HPP_NOEXCEPT
+ : shadingRate( shadingRate_ ), sampleCount( sampleCount_ ), sampleLocationCount( sampleLocationCount_ ), pSampleLocations( pSampleLocations_ )
{}
- VULKAN_HPP_NAMESPACE::CmdProcessCommandsInfoNVX & operator=( VULKAN_HPP_NAMESPACE::CmdProcessCommandsInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::CmdProcessCommandsInfoNVX ) - offsetof( CmdProcessCommandsInfoNVX, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR CoarseSampleOrderCustomNV( CoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- CmdProcessCommandsInfoNVX( VkCmdProcessCommandsInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+ CoarseSampleOrderCustomNV( VkCoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- CmdProcessCommandsInfoNVX& operator=( VkCmdProcessCommandsInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CmdProcessCommandsInfoNVX const *>(&rhs);
- return *this;
- }
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ CoarseSampleOrderCustomNV( VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_, uint32_t sampleCount_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV> const & sampleLocations_ )
+ : shadingRate( shadingRate_ ), sampleCount( sampleCount_ ), sampleLocationCount( static_cast<uint32_t>( sampleLocations_.size() ) ), pSampleLocations( sampleLocations_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- CmdProcessCommandsInfoNVX & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ CoarseSampleOrderCustomNV & operator=( VkCoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV const *>( &rhs );
return *this;
}
- CmdProcessCommandsInfoNVX & setObjectTable( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable_ ) VULKAN_HPP_NOEXCEPT
+ CoarseSampleOrderCustomNV & operator=( CoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- objectTable = objectTable_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( CoarseSampleOrderCustomNV ) );
return *this;
}
- CmdProcessCommandsInfoNVX & setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT
+ CoarseSampleOrderCustomNV & setShadingRate( VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_ ) VULKAN_HPP_NOEXCEPT
{
- indirectCommandsLayout = indirectCommandsLayout_;
+ shadingRate = shadingRate_;
return *this;
}
- CmdProcessCommandsInfoNVX & setIndirectCommandsTokenCount( uint32_t indirectCommandsTokenCount_ ) VULKAN_HPP_NOEXCEPT
+ CoarseSampleOrderCustomNV & setSampleCount( uint32_t sampleCount_ ) VULKAN_HPP_NOEXCEPT
{
- indirectCommandsTokenCount = indirectCommandsTokenCount_;
+ sampleCount = sampleCount_;
return *this;
}
- CmdProcessCommandsInfoNVX & setPIndirectCommandsTokens( const VULKAN_HPP_NAMESPACE::IndirectCommandsTokenNVX* pIndirectCommandsTokens_ ) VULKAN_HPP_NOEXCEPT
+ CoarseSampleOrderCustomNV & setSampleLocationCount( uint32_t sampleLocationCount_ ) VULKAN_HPP_NOEXCEPT
{
- pIndirectCommandsTokens = pIndirectCommandsTokens_;
+ sampleLocationCount = sampleLocationCount_;
return *this;
}
- CmdProcessCommandsInfoNVX & setMaxSequencesCount( uint32_t maxSequencesCount_ ) VULKAN_HPP_NOEXCEPT
+ CoarseSampleOrderCustomNV & setPSampleLocations( const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV* pSampleLocations_ ) VULKAN_HPP_NOEXCEPT
{
- maxSequencesCount = maxSequencesCount_;
+ pSampleLocations = pSampleLocations_;
return *this;
}
- CmdProcessCommandsInfoNVX & setTargetCommandBuffer( VULKAN_HPP_NAMESPACE::CommandBuffer targetCommandBuffer_ ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ CoarseSampleOrderCustomNV & setSampleLocations( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV> const & sampleLocations_ ) VULKAN_HPP_NOEXCEPT
{
- targetCommandBuffer = targetCommandBuffer_;
+ sampleLocationCount = static_cast<uint32_t>( sampleLocations_.size() );
+ pSampleLocations = sampleLocations_.data();
return *this;
}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
- CmdProcessCommandsInfoNVX & setSequencesCountBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ ) VULKAN_HPP_NOEXCEPT
- {
- sequencesCountBuffer = sequencesCountBuffer_;
- return *this;
- }
- CmdProcessCommandsInfoNVX & setSequencesCountOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ ) VULKAN_HPP_NOEXCEPT
+ operator VkCoarseSampleOrderCustomNV const&() const VULKAN_HPP_NOEXCEPT
{
- sequencesCountOffset = sequencesCountOffset_;
- return *this;
+ return *reinterpret_cast<const VkCoarseSampleOrderCustomNV*>( this );
}
- CmdProcessCommandsInfoNVX & setSequencesIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ ) VULKAN_HPP_NOEXCEPT
+ operator VkCoarseSampleOrderCustomNV &() VULKAN_HPP_NOEXCEPT
{
- sequencesIndexBuffer = sequencesIndexBuffer_;
- return *this;
+ return *reinterpret_cast<VkCoarseSampleOrderCustomNV*>( this );
}
- CmdProcessCommandsInfoNVX & setSequencesIndexOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ ) VULKAN_HPP_NOEXCEPT
- {
- sequencesIndexOffset = sequencesIndexOffset_;
- return *this;
- }
- operator VkCmdProcessCommandsInfoNVX const&() const VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( CoarseSampleOrderCustomNV const& ) const = default;
+#else
+ bool operator==( CoarseSampleOrderCustomNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkCmdProcessCommandsInfoNVX*>( this );
+ return ( shadingRate == rhs.shadingRate )
+ && ( sampleCount == rhs.sampleCount )
+ && ( sampleLocationCount == rhs.sampleLocationCount )
+ && ( pSampleLocations == rhs.pSampleLocations );
}
- operator VkCmdProcessCommandsInfoNVX &() VULKAN_HPP_NOEXCEPT
+ bool operator!=( CoarseSampleOrderCustomNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkCmdProcessCommandsInfoNVX*>( this );
+ return !operator==( rhs );
}
+#endif
- bool operator==( CmdProcessCommandsInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( objectTable == rhs.objectTable )
- && ( indirectCommandsLayout == rhs.indirectCommandsLayout )
- && ( indirectCommandsTokenCount == rhs.indirectCommandsTokenCount )
- && ( pIndirectCommandsTokens == rhs.pIndirectCommandsTokens )
- && ( maxSequencesCount == rhs.maxSequencesCount )
- && ( targetCommandBuffer == rhs.targetCommandBuffer )
- && ( sequencesCountBuffer == rhs.sequencesCountBuffer )
- && ( sequencesCountOffset == rhs.sequencesCountOffset )
- && ( sequencesIndexBuffer == rhs.sequencesIndexBuffer )
- && ( sequencesIndexOffset == rhs.sequencesIndexOffset );
- }
- bool operator!=( CmdProcessCommandsInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCmdProcessCommandsInfoNVX;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable = {};
- VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout = {};
- uint32_t indirectCommandsTokenCount = {};
- const VULKAN_HPP_NAMESPACE::IndirectCommandsTokenNVX* pIndirectCommandsTokens = {};
- uint32_t maxSequencesCount = {};
- VULKAN_HPP_NAMESPACE::CommandBuffer targetCommandBuffer = {};
- VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer = {};
- VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset = {};
- VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer = {};
- VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset = {};
+ VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate = VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::eNoInvocations;
+ uint32_t sampleCount = {};
+ uint32_t sampleLocationCount = {};
+ const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV* pSampleLocations = {};
+
};
- static_assert( sizeof( CmdProcessCommandsInfoNVX ) == sizeof( VkCmdProcessCommandsInfoNVX ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<CmdProcessCommandsInfoNVX>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( CoarseSampleOrderCustomNV ) == sizeof( VkCoarseSampleOrderCustomNV ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<CoarseSampleOrderCustomNV>::value, "struct wrapper is not a standard layout!" );
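// Illustrative sketch, not part of the diff: with enhanced mode available
// (VULKAN_HPP_DISABLE_ENHANCED_MODE not defined), the new ArrayProxyNoTemporaries setter
// shown above fills sampleLocationCount and pSampleLocations from a named container in one
// call. e1InvocationPerPixel is used only as an example palette entry. Assumes
// <vulkan/vulkan.hpp> with struct constructors enabled.
#include <vector>
#include <vulkan/vulkan.hpp>

inline vk::CoarseSampleOrderCustomNV
makeSampleOrder( std::vector<vk::CoarseSampleLocationNV> const & locations )
{
  vk::CoarseSampleOrderCustomNV order;
  order.setShadingRate( vk::ShadingRatePaletteEntryNV::e1InvocationPerPixel )
       .setSampleCount( 1 )
       .setSampleLocations( locations );   // sets both the count and the pointer
  return order;                            // pSampleLocations still refers to the caller's vector
}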
- struct CmdReserveSpaceForCommandsInfoNVX
+ class CommandPool
{
- VULKAN_HPP_CONSTEXPR CmdReserveSpaceForCommandsInfoNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable_ = {},
- VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout_ = {},
- uint32_t maxSequencesCount_ = {} ) VULKAN_HPP_NOEXCEPT
- : objectTable( objectTable_ )
- , indirectCommandsLayout( indirectCommandsLayout_ )
- , maxSequencesCount( maxSequencesCount_ )
+ public:
+ using CType = VkCommandPool;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCommandPool;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandPool;
+
+ public:
+ VULKAN_HPP_CONSTEXPR CommandPool() VULKAN_HPP_NOEXCEPT
+ : m_commandPool(VK_NULL_HANDLE)
{}
- VULKAN_HPP_NAMESPACE::CmdReserveSpaceForCommandsInfoNVX & operator=( VULKAN_HPP_NAMESPACE::CmdReserveSpaceForCommandsInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::CmdReserveSpaceForCommandsInfoNVX ) - offsetof( CmdReserveSpaceForCommandsInfoNVX, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR CommandPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_commandPool(VK_NULL_HANDLE)
+ {}
- CmdReserveSpaceForCommandsInfoNVX( VkCmdReserveSpaceForCommandsInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
+ VULKAN_HPP_TYPESAFE_EXPLICIT CommandPool( VkCommandPool commandPool ) VULKAN_HPP_NOEXCEPT
+ : m_commandPool( commandPool )
+ {}
- CmdReserveSpaceForCommandsInfoNVX& operator=( VkCmdReserveSpaceForCommandsInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ CommandPool & operator=(VkCommandPool commandPool) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CmdReserveSpaceForCommandsInfoNVX const *>(&rhs);
+ m_commandPool = commandPool;
return *this;
}
+#endif
- CmdReserveSpaceForCommandsInfoNVX & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ CommandPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ m_commandPool = VK_NULL_HANDLE;
return *this;
}
- CmdReserveSpaceForCommandsInfoNVX & setObjectTable( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable_ ) VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( CommandPool const& ) const = default;
+#else
+ bool operator==( CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- objectTable = objectTable_;
- return *this;
+ return m_commandPool == rhs.m_commandPool;
}
- CmdReserveSpaceForCommandsInfoNVX & setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT
+ bool operator!=(CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- indirectCommandsLayout = indirectCommandsLayout_;
- return *this;
+ return m_commandPool != rhs.m_commandPool;
}
- CmdReserveSpaceForCommandsInfoNVX & setMaxSequencesCount( uint32_t maxSequencesCount_ ) VULKAN_HPP_NOEXCEPT
+ bool operator<(CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- maxSequencesCount = maxSequencesCount_;
- return *this;
+ return m_commandPool < rhs.m_commandPool;
}
+#endif
- operator VkCmdReserveSpaceForCommandsInfoNVX const&() const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandPool() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkCmdReserveSpaceForCommandsInfoNVX*>( this );
+ return m_commandPool;
}
- operator VkCmdReserveSpaceForCommandsInfoNVX &() VULKAN_HPP_NOEXCEPT
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkCmdReserveSpaceForCommandsInfoNVX*>( this );
+ return m_commandPool != VK_NULL_HANDLE;
}
- bool operator==( CmdReserveSpaceForCommandsInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!() const VULKAN_HPP_NOEXCEPT
{
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( objectTable == rhs.objectTable )
- && ( indirectCommandsLayout == rhs.indirectCommandsLayout )
- && ( maxSequencesCount == rhs.maxSequencesCount );
+ return m_commandPool == VK_NULL_HANDLE;
}
- bool operator!=( CmdReserveSpaceForCommandsInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
+ private:
+ VkCommandPool m_commandPool;
+ };
+ static_assert( sizeof( VULKAN_HPP_NAMESPACE::CommandPool ) == sizeof( VkCommandPool ), "handle and wrapper have different size!" );
- public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCmdReserveSpaceForCommandsInfoNVX;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable = {};
- VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout = {};
- uint32_t maxSequencesCount = {};
+ template <>
+ struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eCommandPool>
+ {
+ using type = VULKAN_HPP_NAMESPACE::CommandPool;
};
- static_assert( sizeof( CmdReserveSpaceForCommandsInfoNVX ) == sizeof( VkCmdReserveSpaceForCommandsInfoNVX ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<CmdReserveSpaceForCommandsInfoNVX>::value, "struct wrapper is not a standard layout!" );
- struct CoarseSampleLocationNV
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eCommandPool>
{
- VULKAN_HPP_CONSTEXPR CoarseSampleLocationNV( uint32_t pixelX_ = {},
- uint32_t pixelY_ = {},
- uint32_t sample_ = {} ) VULKAN_HPP_NOEXCEPT
- : pixelX( pixelX_ )
- , pixelY( pixelY_ )
- , sample( sample_ )
+ using Type = VULKAN_HPP_NAMESPACE::CommandPool;
+ };
+
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandPool>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::CommandPool;
+ };
+
+
+ template <>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::CommandPool>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+ };
+
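// Illustrative sketch, not part of the diff: the handle wrappers such as CommandPool
// (shown above) default-construct to VK_NULL_HANDLE and expose explicit bool / operator!
// for null checks, plus assignment from nullptr to reset the handle. Assumes
// <vulkan/vulkan.hpp> with the default vk namespace.
#include <vulkan/vulkan.hpp>

inline bool isUsable( vk::CommandPool pool )
{
  return static_cast<bool>( pool );   // explicit operator bool: true when non-null
}

inline void forget( vk::CommandPool & pool )
{
  // The owner is assumed to have destroyed the underlying VkCommandPool already.
  pool = nullptr;                     // operator=( std::nullptr_t ) resets to VK_NULL_HANDLE
}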
+ struct CommandBufferAllocateInfo
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferAllocateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR CommandBufferAllocateInfo(VULKAN_HPP_NAMESPACE::CommandPool commandPool_ = {}, VULKAN_HPP_NAMESPACE::CommandBufferLevel level_ = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary, uint32_t commandBufferCount_ = {}) VULKAN_HPP_NOEXCEPT
+ : commandPool( commandPool_ ), level( level_ ), commandBufferCount( commandBufferCount_ )
{}
- CoarseSampleLocationNV( VkCoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR CommandBufferAllocateInfo( CommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ CommandBufferAllocateInfo( VkCommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- CoarseSampleLocationNV& operator=( VkCoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ CommandBufferAllocateInfo & operator=( VkCommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const *>( &rhs );
return *this;
}
- CoarseSampleLocationNV & setPixelX( uint32_t pixelX_ ) VULKAN_HPP_NOEXCEPT
+ CommandBufferAllocateInfo & operator=( CommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- pixelX = pixelX_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( CommandBufferAllocateInfo ) );
return *this;
}
- CoarseSampleLocationNV & setPixelY( uint32_t pixelY_ ) VULKAN_HPP_NOEXCEPT
+ CommandBufferAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- pixelY = pixelY_;
+ pNext = pNext_;
return *this;
}
- CoarseSampleLocationNV & setSample( uint32_t sample_ ) VULKAN_HPP_NOEXCEPT
+ CommandBufferAllocateInfo & setCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool_ ) VULKAN_HPP_NOEXCEPT
{
- sample = sample_;
+ commandPool = commandPool_;
return *this;
}
- operator VkCoarseSampleLocationNV const&() const VULKAN_HPP_NOEXCEPT
+ CommandBufferAllocateInfo & setLevel( VULKAN_HPP_NAMESPACE::CommandBufferLevel level_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkCoarseSampleLocationNV*>( this );
+ level = level_;
+ return *this;
}
- operator VkCoarseSampleLocationNV &() VULKAN_HPP_NOEXCEPT
+ CommandBufferAllocateInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkCoarseSampleLocationNV*>( this );
+ commandBufferCount = commandBufferCount_;
+ return *this;
}
- bool operator==( CoarseSampleLocationNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+ operator VkCommandBufferAllocateInfo const&() const VULKAN_HPP_NOEXCEPT
{
- return ( pixelX == rhs.pixelX )
- && ( pixelY == rhs.pixelY )
- && ( sample == rhs.sample );
+ return *reinterpret_cast<const VkCommandBufferAllocateInfo*>( this );
}
- bool operator!=( CoarseSampleLocationNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ operator VkCommandBufferAllocateInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkCommandBufferAllocateInfo*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( CommandBufferAllocateInfo const& ) const = default;
+#else
+ bool operator==( CommandBufferAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( commandPool == rhs.commandPool )
+ && ( level == rhs.level )
+ && ( commandBufferCount == rhs.commandBufferCount );
+ }
+
+ bool operator!=( CommandBufferAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- uint32_t pixelX = {};
- uint32_t pixelY = {};
- uint32_t sample = {};
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferAllocateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::CommandPool commandPool = {};
+ VULKAN_HPP_NAMESPACE::CommandBufferLevel level = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary;
+ uint32_t commandBufferCount = {};
+
};
- static_assert( sizeof( CoarseSampleLocationNV ) == sizeof( VkCoarseSampleLocationNV ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<CoarseSampleLocationNV>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( CommandBufferAllocateInfo ) == sizeof( VkCommandBufferAllocateInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<CommandBufferAllocateInfo>::value, "struct wrapper is not a standard layout!" );
- struct CoarseSampleOrderCustomNV
+ template <>
+ struct CppType<StructureType, StructureType::eCommandBufferAllocateInfo>
{
- VULKAN_HPP_CONSTEXPR CoarseSampleOrderCustomNV( VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_ = VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::eNoInvocations,
- uint32_t sampleCount_ = {},
- uint32_t sampleLocationCount_ = {},
- const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV* pSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT
- : shadingRate( shadingRate_ )
- , sampleCount( sampleCount_ )
- , sampleLocationCount( sampleLocationCount_ )
- , pSampleLocations( pSampleLocations_ )
+ using Type = CommandBufferAllocateInfo;
+ };
+
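// Illustrative sketch, not part of the diff: building the allocate info with the constructor
// shown above. device.allocateCommandBuffers is the usual vulkan.hpp enhanced-mode
// convenience call (returning std::vector when exceptions are enabled); it is assumed here
// and not shown in this hunk.
#include <vector>
#include <vulkan/vulkan.hpp>

inline std::vector<vk::CommandBuffer>
allocatePrimaries( vk::Device device, vk::CommandPool pool, uint32_t count )
{
  vk::CommandBufferAllocateInfo allocInfo( pool, vk::CommandBufferLevel::ePrimary, count );
  return device.allocateCommandBuffers( allocInfo );
}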
+ class RenderPass
+ {
+ public:
+ using CType = VkRenderPass;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eRenderPass;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eRenderPass;
+
+ public:
+ VULKAN_HPP_CONSTEXPR RenderPass() VULKAN_HPP_NOEXCEPT
+ : m_renderPass(VK_NULL_HANDLE)
{}
- CoarseSampleOrderCustomNV( VkCoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
+ VULKAN_HPP_CONSTEXPR RenderPass( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_renderPass(VK_NULL_HANDLE)
+ {}
- CoarseSampleOrderCustomNV& operator=( VkCoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV const *>(&rhs);
- return *this;
- }
+ VULKAN_HPP_TYPESAFE_EXPLICIT RenderPass( VkRenderPass renderPass ) VULKAN_HPP_NOEXCEPT
+ : m_renderPass( renderPass )
+ {}
- CoarseSampleOrderCustomNV & setShadingRate( VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_ ) VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ RenderPass & operator=(VkRenderPass renderPass) VULKAN_HPP_NOEXCEPT
{
- shadingRate = shadingRate_;
+ m_renderPass = renderPass;
return *this;
}
+#endif
- CoarseSampleOrderCustomNV & setSampleCount( uint32_t sampleCount_ ) VULKAN_HPP_NOEXCEPT
+ RenderPass & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
{
- sampleCount = sampleCount_;
+ m_renderPass = VK_NULL_HANDLE;
return *this;
}
- CoarseSampleOrderCustomNV & setSampleLocationCount( uint32_t sampleLocationCount_ ) VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( RenderPass const& ) const = default;
+#else
+ bool operator==( RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- sampleLocationCount = sampleLocationCount_;
- return *this;
+ return m_renderPass == rhs.m_renderPass;
}
- CoarseSampleOrderCustomNV & setPSampleLocations( const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV* pSampleLocations_ ) VULKAN_HPP_NOEXCEPT
+ bool operator!=(RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- pSampleLocations = pSampleLocations_;
- return *this;
+ return m_renderPass != rhs.m_renderPass;
}
- operator VkCoarseSampleOrderCustomNV const&() const VULKAN_HPP_NOEXCEPT
+ bool operator<(RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkCoarseSampleOrderCustomNV*>( this );
+ return m_renderPass < rhs.m_renderPass;
}
+#endif
- operator VkCoarseSampleOrderCustomNV &() VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkRenderPass() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkCoarseSampleOrderCustomNV*>( this );
+ return m_renderPass;
}
- bool operator==( CoarseSampleOrderCustomNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
{
- return ( shadingRate == rhs.shadingRate )
- && ( sampleCount == rhs.sampleCount )
- && ( sampleLocationCount == rhs.sampleLocationCount )
- && ( pSampleLocations == rhs.pSampleLocations );
+ return m_renderPass != VK_NULL_HANDLE;
}
- bool operator!=( CoarseSampleOrderCustomNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!() const VULKAN_HPP_NOEXCEPT
{
- return !operator==( rhs );
+ return m_renderPass == VK_NULL_HANDLE;
}
- public:
- VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate = VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::eNoInvocations;
- uint32_t sampleCount = {};
- uint32_t sampleLocationCount = {};
- const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV* pSampleLocations = {};
+ private:
+ VkRenderPass m_renderPass;
};
- static_assert( sizeof( CoarseSampleOrderCustomNV ) == sizeof( VkCoarseSampleOrderCustomNV ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<CoarseSampleOrderCustomNV>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( VULKAN_HPP_NAMESPACE::RenderPass ) == sizeof( VkRenderPass ), "handle and wrapper have different size!" );
- struct CommandBufferAllocateInfo
+ template <>
+ struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eRenderPass>
+ {
+ using type = VULKAN_HPP_NAMESPACE::RenderPass;
+ };
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eRenderPass>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::RenderPass;
+ };
+
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eRenderPass>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::RenderPass;
+ };
+
+
+ template <>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::RenderPass>
{
- VULKAN_HPP_CONSTEXPR CommandBufferAllocateInfo( VULKAN_HPP_NAMESPACE::CommandPool commandPool_ = {},
- VULKAN_HPP_NAMESPACE::CommandBufferLevel level_ = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary,
- uint32_t commandBufferCount_ = {} ) VULKAN_HPP_NOEXCEPT
- : commandPool( commandPool_ )
- , level( level_ )
- , commandBufferCount( commandBufferCount_ )
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+ };
+
+ class Framebuffer
+ {
+ public:
+ using CType = VkFramebuffer;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eFramebuffer;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFramebuffer;
+
+ public:
+ VULKAN_HPP_CONSTEXPR Framebuffer() VULKAN_HPP_NOEXCEPT
+ : m_framebuffer(VK_NULL_HANDLE)
{}
- VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & operator=( VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo ) - offsetof( CommandBufferAllocateInfo, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR Framebuffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_framebuffer(VK_NULL_HANDLE)
+ {}
- CommandBufferAllocateInfo( VkCommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
+ VULKAN_HPP_TYPESAFE_EXPLICIT Framebuffer( VkFramebuffer framebuffer ) VULKAN_HPP_NOEXCEPT
+ : m_framebuffer( framebuffer )
+ {}
- CommandBufferAllocateInfo& operator=( VkCommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ Framebuffer & operator=(VkFramebuffer framebuffer) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const *>(&rhs);
+ m_framebuffer = framebuffer;
return *this;
}
+#endif
- CommandBufferAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ Framebuffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ m_framebuffer = VK_NULL_HANDLE;
return *this;
}
- CommandBufferAllocateInfo & setCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool_ ) VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( Framebuffer const& ) const = default;
+#else
+ bool operator==( Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- commandPool = commandPool_;
- return *this;
+ return m_framebuffer == rhs.m_framebuffer;
}
- CommandBufferAllocateInfo & setLevel( VULKAN_HPP_NAMESPACE::CommandBufferLevel level_ ) VULKAN_HPP_NOEXCEPT
+ bool operator!=(Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- level = level_;
- return *this;
+ return m_framebuffer != rhs.m_framebuffer;
}
- CommandBufferAllocateInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT
+ bool operator<(Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- commandBufferCount = commandBufferCount_;
- return *this;
+ return m_framebuffer < rhs.m_framebuffer;
}
+#endif
- operator VkCommandBufferAllocateInfo const&() const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkFramebuffer() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkCommandBufferAllocateInfo*>( this );
+ return m_framebuffer;
}
- operator VkCommandBufferAllocateInfo &() VULKAN_HPP_NOEXCEPT
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkCommandBufferAllocateInfo*>( this );
+ return m_framebuffer != VK_NULL_HANDLE;
}
- bool operator==( CommandBufferAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!() const VULKAN_HPP_NOEXCEPT
{
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( commandPool == rhs.commandPool )
- && ( level == rhs.level )
- && ( commandBufferCount == rhs.commandBufferCount );
+ return m_framebuffer == VK_NULL_HANDLE;
}
- bool operator!=( CommandBufferAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
+ private:
+ VkFramebuffer m_framebuffer;
+ };
+ static_assert( sizeof( VULKAN_HPP_NAMESPACE::Framebuffer ) == sizeof( VkFramebuffer ), "handle and wrapper have different size!" );
- public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferAllocateInfo;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::CommandPool commandPool = {};
- VULKAN_HPP_NAMESPACE::CommandBufferLevel level = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary;
- uint32_t commandBufferCount = {};
+ template <>
+ struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eFramebuffer>
+ {
+ using type = VULKAN_HPP_NAMESPACE::Framebuffer;
+ };
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eFramebuffer>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::Framebuffer;
+ };
+
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFramebuffer>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::Framebuffer;
+ };
+
+
+ template <>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Framebuffer>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
};
- static_assert( sizeof( CommandBufferAllocateInfo ) == sizeof( VkCommandBufferAllocateInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<CommandBufferAllocateInfo>::value, "struct wrapper is not a standard layout!" );
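  // Illustrative usage sketch (not taken from the header itself): the RenderPass and
  // Framebuffer wrappers above are thin value types around the raw Vk handles. They
  // default-construct to VK_NULL_HANDLE, can be tested in a boolean context, and convert
  // back to the C handle explicitly. `framebuffer` below is a hypothetical handle.
  vk::Framebuffer framebuffer;                                          // holds VK_NULL_HANDLE
  bool            created = static_cast<bool>( framebuffer );          // false until assigned
  VkFramebuffer   raw     = static_cast<VkFramebuffer>( framebuffer ); // explicit conversion to the C handle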
struct CommandBufferInheritanceInfo
{
- VULKAN_HPP_CONSTEXPR CommandBufferInheritanceInfo( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {},
- uint32_t subpass_ = {},
- VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable_ = {},
- VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags_ = {},
- VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {} ) VULKAN_HPP_NOEXCEPT
- : renderPass( renderPass_ )
- , subpass( subpass_ )
- , framebuffer( framebuffer_ )
- , occlusionQueryEnable( occlusionQueryEnable_ )
- , queryFlags( queryFlags_ )
- , pipelineStatistics( pipelineStatistics_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR CommandBufferInheritanceInfo(VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, uint32_t subpass_ = {}, VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable_ = {}, VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags_ = {}, VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {}) VULKAN_HPP_NOEXCEPT
+ : renderPass( renderPass_ ), subpass( subpass_ ), framebuffer( framebuffer_ ), occlusionQueryEnable( occlusionQueryEnable_ ), queryFlags( queryFlags_ ), pipelineStatistics( pipelineStatistics_ )
{}
- VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo & operator=( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo ) - offsetof( CommandBufferInheritanceInfo, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR CommandBufferInheritanceInfo( CommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CommandBufferInheritanceInfo( VkCommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- CommandBufferInheritanceInfo& operator=( VkCommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ CommandBufferInheritanceInfo & operator=( VkCommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo const *>( &rhs );
+ return *this;
+ }
+
+ CommandBufferInheritanceInfo & operator=( CommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( CommandBufferInheritanceInfo ) );
return *this;
}
@@ -25114,6 +24185,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkCommandBufferInheritanceInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCommandBufferInheritanceInfo*>( this );
@@ -25124,6 +24196,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkCommandBufferInheritanceInfo*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( CommandBufferInheritanceInfo const& ) const = default;
+#else
bool operator==( CommandBufferInheritanceInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -25140,6 +24216,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceInfo;
@@ -25150,32 +24229,44 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable = {};
VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags = {};
VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics = {};
+
};
static_assert( sizeof( CommandBufferInheritanceInfo ) == sizeof( VkCommandBufferInheritanceInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<CommandBufferInheritanceInfo>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eCommandBufferInheritanceInfo>
+ {
+ using Type = CommandBufferInheritanceInfo;
+ };
+
struct CommandBufferBeginInfo
{
- VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo( VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags_ = {},
- const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo* pInheritanceInfo_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , pInheritanceInfo( pInheritanceInfo_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferBeginInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo(VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags_ = {}, const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo* pInheritanceInfo_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), pInheritanceInfo( pInheritanceInfo_ )
{}
- VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo & operator=( VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo ) - offsetof( CommandBufferBeginInfo, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo( CommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CommandBufferBeginInfo( VkCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- CommandBufferBeginInfo& operator=( VkCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ CommandBufferBeginInfo & operator=( VkCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo const *>( &rhs );
+ return *this;
+ }
+
+ CommandBufferBeginInfo & operator=( CommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( CommandBufferBeginInfo ) );
return *this;
}
@@ -25197,6 +24288,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkCommandBufferBeginInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCommandBufferBeginInfo*>( this );
@@ -25207,6 +24299,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkCommandBufferBeginInfo*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( CommandBufferBeginInfo const& ) const = default;
+#else
bool operator==( CommandBufferBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -25219,36 +24315,53 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferBeginInfo;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags = {};
const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo* pInheritanceInfo = {};
+
};
static_assert( sizeof( CommandBufferBeginInfo ) == sizeof( VkCommandBufferBeginInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<CommandBufferBeginInfo>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eCommandBufferBeginInfo>
+ {
+ using Type = CommandBufferBeginInfo;
+ };
+
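  // Illustrative sketch (renderPass, framebuffer and secondaryCommandBuffer are assumed to
  // exist already): record a secondary command buffer that continues a render pass, using
  // the two structs defined above.
  vk::CommandBufferInheritanceInfo inheritance( renderPass, 0 /* subpass */, framebuffer );
  vk::CommandBufferBeginInfo       beginInfo( vk::CommandBufferUsageFlagBits::eRenderPassContinue, &inheritance );
  secondaryCommandBuffer.begin( beginInfo );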
struct CommandBufferInheritanceConditionalRenderingInfoEXT
{
- VULKAN_HPP_CONSTEXPR CommandBufferInheritanceConditionalRenderingInfoEXT( VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable_ = {} ) VULKAN_HPP_NOEXCEPT
- : conditionalRenderingEnable( conditionalRenderingEnable_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR CommandBufferInheritanceConditionalRenderingInfoEXT(VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable_ = {}) VULKAN_HPP_NOEXCEPT
+ : conditionalRenderingEnable( conditionalRenderingEnable_ )
{}
- VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT & operator=( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT ) - offsetof( CommandBufferInheritanceConditionalRenderingInfoEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR CommandBufferInheritanceConditionalRenderingInfoEXT( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CommandBufferInheritanceConditionalRenderingInfoEXT( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ CommandBufferInheritanceConditionalRenderingInfoEXT & operator=( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT const *>( &rhs );
+ return *this;
+ }
- CommandBufferInheritanceConditionalRenderingInfoEXT& operator=( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ CommandBufferInheritanceConditionalRenderingInfoEXT & operator=( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( CommandBufferInheritanceConditionalRenderingInfoEXT ) );
return *this;
}
@@ -25264,6 +24377,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkCommandBufferInheritanceConditionalRenderingInfoEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCommandBufferInheritanceConditionalRenderingInfoEXT*>( this );
@@ -25274,6 +24388,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkCommandBufferInheritanceConditionalRenderingInfoEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( CommandBufferInheritanceConditionalRenderingInfoEXT const& ) const = default;
+#else
bool operator==( CommandBufferInheritanceConditionalRenderingInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -25285,37 +24403,147 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable = {};
+
};
static_assert( sizeof( CommandBufferInheritanceConditionalRenderingInfoEXT ) == sizeof( VkCommandBufferInheritanceConditionalRenderingInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<CommandBufferInheritanceConditionalRenderingInfoEXT>::value, "struct wrapper is not a standard layout!" );
- struct CommandPoolCreateInfo
+ template <>
+ struct CppType<StructureType, StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT>
{
- VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo( VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_ = {},
- uint32_t queueFamilyIndex_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , queueFamilyIndex( queueFamilyIndex_ )
+ using Type = CommandBufferInheritanceConditionalRenderingInfoEXT;
+ };
+
+ struct CommandBufferInheritanceRenderPassTransformInfoQCOM
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderPassTransformInfoQCOM(VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {}) VULKAN_HPP_NOEXCEPT
+ : transform( transform_ ), renderArea( renderArea_ )
{}
- VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & operator=( VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderPassTransformInfoQCOM( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ CommandBufferInheritanceRenderPassTransformInfoQCOM( VkCommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo ) - offsetof( CommandPoolCreateInfo, pNext ) );
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ CommandBufferInheritanceRenderPassTransformInfoQCOM & operator=( VkCommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderPassTransformInfoQCOM const *>( &rhs );
+ return *this;
+ }
+
+ CommandBufferInheritanceRenderPassTransformInfoQCOM & operator=( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( CommandBufferInheritanceRenderPassTransformInfoQCOM ) );
return *this;
}
+ CommandBufferInheritanceRenderPassTransformInfoQCOM & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ CommandBufferInheritanceRenderPassTransformInfoQCOM & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT
+ {
+ transform = transform_;
+ return *this;
+ }
+
+ CommandBufferInheritanceRenderPassTransformInfoQCOM & setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT
+ {
+ renderArea = renderArea_;
+ return *this;
+ }
+
+
+ operator VkCommandBufferInheritanceRenderPassTransformInfoQCOM const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkCommandBufferInheritanceRenderPassTransformInfoQCOM*>( this );
+ }
+
+ operator VkCommandBufferInheritanceRenderPassTransformInfoQCOM &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkCommandBufferInheritanceRenderPassTransformInfoQCOM*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( CommandBufferInheritanceRenderPassTransformInfoQCOM const& ) const = default;
+#else
+ bool operator==( CommandBufferInheritanceRenderPassTransformInfoQCOM const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( transform == rhs.transform )
+ && ( renderArea == rhs.renderArea );
+ }
+
+ bool operator!=( CommandBufferInheritanceRenderPassTransformInfoQCOM const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
+ VULKAN_HPP_NAMESPACE::Rect2D renderArea = {};
+
+ };
+ static_assert( sizeof( CommandBufferInheritanceRenderPassTransformInfoQCOM ) == sizeof( VkCommandBufferInheritanceRenderPassTransformInfoQCOM ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<CommandBufferInheritanceRenderPassTransformInfoQCOM>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM>
+ {
+ using Type = CommandBufferInheritanceRenderPassTransformInfoQCOM;
+ };
+
+ struct CommandPoolCreateInfo
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandPoolCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo(VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_ = {}, uint32_t queueFamilyIndex_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), queueFamilyIndex( queueFamilyIndex_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo( CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
CommandPoolCreateInfo( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- CommandPoolCreateInfo& operator=( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ CommandPoolCreateInfo & operator=( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const *>( &rhs );
+ return *this;
+ }
+
+ CommandPoolCreateInfo & operator=( CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( CommandPoolCreateInfo ) );
return *this;
}
@@ -25337,6 +24565,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkCommandPoolCreateInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCommandPoolCreateInfo*>( this );
@@ -25347,6 +24576,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkCommandPoolCreateInfo*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( CommandPoolCreateInfo const& ) const = default;
+#else
bool operator==( CommandPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -25359,34 +24592,152 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandPoolCreateInfo;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags = {};
uint32_t queueFamilyIndex = {};
+
};
static_assert( sizeof( CommandPoolCreateInfo ) == sizeof( VkCommandPoolCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<CommandPoolCreateInfo>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eCommandPoolCreateInfo>
+ {
+ using Type = CommandPoolCreateInfo;
+ };
+
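  // Illustrative sketch (assumes a created vk::Device `device`, a valid `queueFamilyIndex`,
  // and the default exception-based error handling): CommandPoolCreateInfo is all that is
  // needed to create a pool whose command buffers can be reset individually.
  vk::CommandPoolCreateInfo poolInfo( vk::CommandPoolCreateFlagBits::eResetCommandBuffer, queueFamilyIndex );
  vk::CommandPool           commandPool = device.createCommandPool( poolInfo );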
+ class ShaderModule
+ {
+ public:
+ using CType = VkShaderModule;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eShaderModule;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eShaderModule;
+
+ public:
+ VULKAN_HPP_CONSTEXPR ShaderModule() VULKAN_HPP_NOEXCEPT
+ : m_shaderModule(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR ShaderModule( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_shaderModule(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT ShaderModule( VkShaderModule shaderModule ) VULKAN_HPP_NOEXCEPT
+ : m_shaderModule( shaderModule )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ ShaderModule & operator=(VkShaderModule shaderModule) VULKAN_HPP_NOEXCEPT
+ {
+ m_shaderModule = shaderModule;
+ return *this;
+ }
+#endif
+
+ ShaderModule & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ {
+ m_shaderModule = VK_NULL_HANDLE;
+ return *this;
+ }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ShaderModule const& ) const = default;
+#else
+ bool operator==( ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_shaderModule == rhs.m_shaderModule;
+ }
+
+ bool operator!=(ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_shaderModule != rhs.m_shaderModule;
+ }
+
+ bool operator<(ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_shaderModule < rhs.m_shaderModule;
+ }
+#endif
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkShaderModule() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_shaderModule;
+ }
+
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_shaderModule != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_shaderModule == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkShaderModule m_shaderModule;
+ };
+ static_assert( sizeof( VULKAN_HPP_NAMESPACE::ShaderModule ) == sizeof( VkShaderModule ), "handle and wrapper have different size!" );
+
+ template <>
+ struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eShaderModule>
+ {
+ using type = VULKAN_HPP_NAMESPACE::ShaderModule;
+ };
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eShaderModule>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::ShaderModule;
+ };
+
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eShaderModule>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::ShaderModule;
+ };
+
+
+ template <>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::ShaderModule>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+ };
+
struct SpecializationMapEntry
{
- VULKAN_HPP_CONSTEXPR SpecializationMapEntry( uint32_t constantID_ = {},
- uint32_t offset_ = {},
- size_t size_ = {} ) VULKAN_HPP_NOEXCEPT
- : constantID( constantID_ )
- , offset( offset_ )
- , size( size_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SpecializationMapEntry(uint32_t constantID_ = {}, uint32_t offset_ = {}, size_t size_ = {}) VULKAN_HPP_NOEXCEPT
+ : constantID( constantID_ ), offset( offset_ ), size( size_ )
{}
+ VULKAN_HPP_CONSTEXPR SpecializationMapEntry( SpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
SpecializationMapEntry( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- SpecializationMapEntry& operator=( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT
+ SpecializationMapEntry & operator=( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SpecializationMapEntry const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SpecializationMapEntry const *>( &rhs );
+ return *this;
+ }
+
+ SpecializationMapEntry & operator=( SpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( SpecializationMapEntry ) );
return *this;
}
@@ -25408,6 +24759,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkSpecializationMapEntry const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSpecializationMapEntry*>( this );
@@ -25418,6 +24770,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkSpecializationMapEntry*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SpecializationMapEntry const& ) const = default;
+#else
bool operator==( SpecializationMapEntry const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( constantID == rhs.constantID )
@@ -25429,35 +24785,52 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
uint32_t constantID = {};
uint32_t offset = {};
size_t size = {};
+
};
static_assert( sizeof( SpecializationMapEntry ) == sizeof( VkSpecializationMapEntry ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SpecializationMapEntry>::value, "struct wrapper is not a standard layout!" );
struct SpecializationInfo
{
- VULKAN_HPP_CONSTEXPR SpecializationInfo( uint32_t mapEntryCount_ = {},
- const VULKAN_HPP_NAMESPACE::SpecializationMapEntry* pMapEntries_ = {},
- size_t dataSize_ = {},
- const void* pData_ = {} ) VULKAN_HPP_NOEXCEPT
- : mapEntryCount( mapEntryCount_ )
- , pMapEntries( pMapEntries_ )
- , dataSize( dataSize_ )
- , pData( pData_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SpecializationInfo(uint32_t mapEntryCount_ = {}, const VULKAN_HPP_NAMESPACE::SpecializationMapEntry* pMapEntries_ = {}, size_t dataSize_ = {}, const void* pData_ = {}) VULKAN_HPP_NOEXCEPT
+ : mapEntryCount( mapEntryCount_ ), pMapEntries( pMapEntries_ ), dataSize( dataSize_ ), pData( pData_ )
{}
+ VULKAN_HPP_CONSTEXPR SpecializationInfo( SpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
SpecializationInfo( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- SpecializationInfo& operator=( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ template <typename T>
+ SpecializationInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SpecializationMapEntry> const & mapEntries_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ = {} )
+ : mapEntryCount( static_cast<uint32_t>( mapEntries_.size() ) ), pMapEntries( mapEntries_.data() ), dataSize( data_.size() * sizeof(T) ), pData( data_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ SpecializationInfo & operator=( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SpecializationInfo const *>( &rhs );
+ return *this;
+ }
+
+ SpecializationInfo & operator=( SpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SpecializationInfo const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( SpecializationInfo ) );
return *this;
}
@@ -25473,6 +24846,15 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ SpecializationInfo & setMapEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SpecializationMapEntry> const & mapEntries_ ) VULKAN_HPP_NOEXCEPT
+ {
+ mapEntryCount = static_cast<uint32_t>( mapEntries_.size() );
+ pMapEntries = mapEntries_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
SpecializationInfo & setDataSize( size_t dataSize_ ) VULKAN_HPP_NOEXCEPT
{
dataSize = dataSize_;
@@ -25485,6 +24867,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ template <typename T>
+ SpecializationInfo & setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dataSize = data_.size() * sizeof(T);
+ pData = data_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
operator VkSpecializationInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSpecializationInfo*>( this );
@@ -25495,6 +24888,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkSpecializationInfo*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SpecializationInfo const& ) const = default;
+#else
bool operator==( SpecializationInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( mapEntryCount == rhs.mapEntryCount )
@@ -25507,44 +24904,47 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
uint32_t mapEntryCount = {};
const VULKAN_HPP_NAMESPACE::SpecializationMapEntry* pMapEntries = {};
size_t dataSize = {};
const void* pData = {};
+
};
static_assert( sizeof( SpecializationInfo ) == sizeof( VkSpecializationInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SpecializationInfo>::value, "struct wrapper is not a standard layout!" );
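  // Illustrative sketch: describe one 32-bit specialization constant with the two structs
  // above. The ArrayProxyNoTemporaries constructor and the setMapEntries / setData helpers
  // introduced here provide an equivalent, container-based way to fill the same fields.
  vk::SpecializationMapEntry entry( 0 /* constantID */, 0 /* offset */, sizeof( uint32_t ) );
  uint32_t                   value = 42;
  vk::SpecializationInfo     specialization( 1, &entry, sizeof( value ), &value );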
struct PipelineShaderStageCreateInfo
{
- VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ = {},
- VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex,
- VULKAN_HPP_NAMESPACE::ShaderModule module_ = {},
- const char* pName_ = {},
- const VULKAN_HPP_NAMESPACE::SpecializationInfo* pSpecializationInfo_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , stage( stage_ )
- , module( module_ )
- , pName( pName_ )
- , pSpecializationInfo( pSpecializationInfo_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineShaderStageCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo(VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex, VULKAN_HPP_NAMESPACE::ShaderModule module_ = {}, const char* pName_ = {}, const VULKAN_HPP_NAMESPACE::SpecializationInfo* pSpecializationInfo_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), stage( stage_ ), module( module_ ), pName( pName_ ), pSpecializationInfo( pSpecializationInfo_ )
{}
- VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo ) - offsetof( PipelineShaderStageCreateInfo, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo( PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineShaderStageCreateInfo( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PipelineShaderStageCreateInfo & operator=( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const *>( &rhs );
+ return *this;
+ }
- PipelineShaderStageCreateInfo& operator=( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineShaderStageCreateInfo & operator=( PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineShaderStageCreateInfo ) );
return *this;
}
@@ -25584,6 +24984,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPipelineShaderStageCreateInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineShaderStageCreateInfo*>( this );
@@ -25594,6 +24995,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPipelineShaderStageCreateInfo*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PipelineShaderStageCreateInfo const& ) const = default;
+#else
bool operator==( PipelineShaderStageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -25609,6 +25014,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageCreateInfo;
@@ -25618,38 +25026,244 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ShaderModule module = {};
const char* pName = {};
const VULKAN_HPP_NAMESPACE::SpecializationInfo* pSpecializationInfo = {};
+
};
static_assert( sizeof( PipelineShaderStageCreateInfo ) == sizeof( VkPipelineShaderStageCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineShaderStageCreateInfo>::value, "struct wrapper is not a standard layout!" );
- struct ComputePipelineCreateInfo
+ template <>
+ struct CppType<StructureType, StructureType::ePipelineShaderStageCreateInfo>
+ {
+ using Type = PipelineShaderStageCreateInfo;
+ };
+
+ class PipelineLayout
+ {
+ public:
+ using CType = VkPipelineLayout;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipelineLayout;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineLayout;
+
+ public:
+ VULKAN_HPP_CONSTEXPR PipelineLayout() VULKAN_HPP_NOEXCEPT
+ : m_pipelineLayout(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR PipelineLayout( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_pipelineLayout(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT PipelineLayout( VkPipelineLayout pipelineLayout ) VULKAN_HPP_NOEXCEPT
+ : m_pipelineLayout( pipelineLayout )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ PipelineLayout & operator=(VkPipelineLayout pipelineLayout) VULKAN_HPP_NOEXCEPT
+ {
+ m_pipelineLayout = pipelineLayout;
+ return *this;
+ }
+#endif
+
+ PipelineLayout & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ {
+ m_pipelineLayout = VK_NULL_HANDLE;
+ return *this;
+ }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PipelineLayout const& ) const = default;
+#else
+ bool operator==( PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_pipelineLayout == rhs.m_pipelineLayout;
+ }
+
+ bool operator!=(PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_pipelineLayout != rhs.m_pipelineLayout;
+ }
+
+ bool operator<(PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_pipelineLayout < rhs.m_pipelineLayout;
+ }
+#endif
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineLayout() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_pipelineLayout;
+ }
+
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_pipelineLayout != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_pipelineLayout == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkPipelineLayout m_pipelineLayout;
+ };
+ static_assert( sizeof( VULKAN_HPP_NAMESPACE::PipelineLayout ) == sizeof( VkPipelineLayout ), "handle and wrapper have different size!" );
+
+ template <>
+ struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::ePipelineLayout>
+ {
+ using type = VULKAN_HPP_NAMESPACE::PipelineLayout;
+ };
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::ePipelineLayout>
{
- VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {},
- VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage_ = {},
- VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {},
- VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {},
- int32_t basePipelineIndex_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , stage( stage_ )
- , layout( layout_ )
- , basePipelineHandle( basePipelineHandle_ )
- , basePipelineIndex( basePipelineIndex_ )
+ using Type = VULKAN_HPP_NAMESPACE::PipelineLayout;
+ };
+
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineLayout>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::PipelineLayout;
+ };
+
+
+ template <>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::PipelineLayout>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+ };
+
+ class Pipeline
+ {
+ public:
+ using CType = VkPipeline;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipeline;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipeline;
+
+ public:
+ VULKAN_HPP_CONSTEXPR Pipeline() VULKAN_HPP_NOEXCEPT
+ : m_pipeline(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR Pipeline( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_pipeline(VK_NULL_HANDLE)
{}
- VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & operator=( VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_TYPESAFE_EXPLICIT Pipeline( VkPipeline pipeline ) VULKAN_HPP_NOEXCEPT
+ : m_pipeline( pipeline )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ Pipeline & operator=(VkPipeline pipeline) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo ) - offsetof( ComputePipelineCreateInfo, pNext ) );
+ m_pipeline = pipeline;
return *this;
}
+#endif
+
+ Pipeline & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ {
+ m_pipeline = VK_NULL_HANDLE;
+ return *this;
+ }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( Pipeline const& ) const = default;
+#else
+ bool operator==( Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_pipeline == rhs.m_pipeline;
+ }
+
+ bool operator!=(Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_pipeline != rhs.m_pipeline;
+ }
+
+ bool operator<(Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_pipeline < rhs.m_pipeline;
+ }
+#endif
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipeline() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_pipeline;
+ }
+
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_pipeline != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_pipeline == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkPipeline m_pipeline;
+ };
+ static_assert( sizeof( VULKAN_HPP_NAMESPACE::Pipeline ) == sizeof( VkPipeline ), "handle and wrapper have different size!" );
+
+ template <>
+ struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::ePipeline>
+ {
+ using type = VULKAN_HPP_NAMESPACE::Pipeline;
+ };
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::ePipeline>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::Pipeline;
+ };
+
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipeline>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::Pipeline;
+ };
+
+
+ template <>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Pipeline>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+ };
+
+ struct ComputePipelineCreateInfo
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eComputePipelineCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo(VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), stage( stage_ ), layout( layout_ ), basePipelineHandle( basePipelineHandle_ ), basePipelineIndex( basePipelineIndex_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo( ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ComputePipelineCreateInfo( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ComputePipelineCreateInfo& operator=( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ ComputePipelineCreateInfo & operator=( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const *>( &rhs );
+ return *this;
+ }
+
+ ComputePipelineCreateInfo & operator=( ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ComputePipelineCreateInfo ) );
return *this;
}
@@ -25665,7 +25279,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- ComputePipelineCreateInfo & setStage( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage_ ) VULKAN_HPP_NOEXCEPT
+ ComputePipelineCreateInfo & setStage( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const & stage_ ) VULKAN_HPP_NOEXCEPT
{
stage = stage_;
return *this;
@@ -25689,6 +25303,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkComputePipelineCreateInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkComputePipelineCreateInfo*>( this );
@@ -25699,6 +25314,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkComputePipelineCreateInfo*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ComputePipelineCreateInfo const& ) const = default;
+#else
bool operator==( ComputePipelineCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -25714,6 +25333,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eComputePipelineCreateInfo;
@@ -25723,34 +25345,44 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::PipelineLayout layout = {};
VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {};
int32_t basePipelineIndex = {};
+
};
static_assert( sizeof( ComputePipelineCreateInfo ) == sizeof( VkComputePipelineCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ComputePipelineCreateInfo>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eComputePipelineCreateInfo>
+ {
+ using Type = ComputePipelineCreateInfo;
+ };
+
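  // Illustrative sketch (assumes `shaderModule` and `pipelineLayout` were created earlier):
  // a compute pipeline description assembled from the shader-stage and pipeline create-info
  // structs above; it can then be handed to vk::Device::createComputePipeline.
  vk::PipelineShaderStageCreateInfo stage( {}, vk::ShaderStageFlagBits::eCompute, shaderModule, "main" );
  vk::ComputePipelineCreateInfo     pipelineInfo( {}, stage, pipelineLayout );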
struct ConditionalRenderingBeginInfoEXT
{
- VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
- VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ = {} ) VULKAN_HPP_NOEXCEPT
- : buffer( buffer_ )
- , offset( offset_ )
- , flags( flags_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eConditionalRenderingBeginInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ = {}) VULKAN_HPP_NOEXCEPT
+ : buffer( buffer_ ), offset( offset_ ), flags( flags_ )
{}
- VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT & operator=( VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT ) - offsetof( ConditionalRenderingBeginInfoEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT( ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ConditionalRenderingBeginInfoEXT( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ConditionalRenderingBeginInfoEXT& operator=( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ ConditionalRenderingBeginInfoEXT & operator=( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+ ConditionalRenderingBeginInfoEXT & operator=( ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ConditionalRenderingBeginInfoEXT ) );
return *this;
}
@@ -25778,6 +25410,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkConditionalRenderingBeginInfoEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkConditionalRenderingBeginInfoEXT*>( this );
@@ -25788,6 +25421,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkConditionalRenderingBeginInfoEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ConditionalRenderingBeginInfoEXT const& ) const = default;
+#else
bool operator==( ConditionalRenderingBeginInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -25801,6 +25438,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eConditionalRenderingBeginInfoEXT;
@@ -25808,30 +25448,43 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Buffer buffer = {};
VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags = {};
+
};
static_assert( sizeof( ConditionalRenderingBeginInfoEXT ) == sizeof( VkConditionalRenderingBeginInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ConditionalRenderingBeginInfoEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eConditionalRenderingBeginInfoEXT>
+ {
+ using Type = ConditionalRenderingBeginInfoEXT;
+ };
+
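  // Illustrative sketch (assumes VK_EXT_conditional_rendering is enabled, a dispatcher with
  // its entry points loaded, and a `buffer` holding a 32-bit predicate at offset 0):
  vk::ConditionalRenderingBeginInfoEXT conditionalInfo( buffer, 0 /* offset */ );
  commandBuffer.beginConditionalRenderingEXT( conditionalInfo );
  // ... commands executed only when the predicate is non-zero ...
  commandBuffer.endConditionalRenderingEXT();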
struct ConformanceVersion
{
- VULKAN_HPP_CONSTEXPR ConformanceVersion( uint8_t major_ = {},
- uint8_t minor_ = {},
- uint8_t subminor_ = {},
- uint8_t patch_ = {} ) VULKAN_HPP_NOEXCEPT
- : major( major_ )
- , minor( minor_ )
- , subminor( subminor_ )
- , patch( patch_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ConformanceVersion(uint8_t major_ = {}, uint8_t minor_ = {}, uint8_t subminor_ = {}, uint8_t patch_ = {}) VULKAN_HPP_NOEXCEPT
+ : major( major_ ), minor( minor_ ), subminor( subminor_ ), patch( patch_ )
{}
+ VULKAN_HPP_CONSTEXPR ConformanceVersion( ConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
ConformanceVersion( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ConformanceVersion& operator=( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT
+ ConformanceVersion & operator=( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ConformanceVersion const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ConformanceVersion const *>( &rhs );
+ return *this;
+ }
+
+ ConformanceVersion & operator=( ConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ConformanceVersion ) );
return *this;
}
@@ -25859,6 +25512,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkConformanceVersion const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkConformanceVersion*>( this );
@@ -25869,6 +25523,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkConformanceVersion*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ConformanceVersion const& ) const = default;
+#else
bool operator==( ConformanceVersion const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( major == rhs.major )
@@ -25881,50 +25539,48 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
uint8_t major = {};
uint8_t minor = {};
uint8_t subminor = {};
uint8_t patch = {};
+
};
static_assert( sizeof( ConformanceVersion ) == sizeof( VkConformanceVersion ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ConformanceVersion>::value, "struct wrapper is not a standard layout!" );
+ using ConformanceVersionKHR = ConformanceVersion;
struct CooperativeMatrixPropertiesNV
{
- VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV( uint32_t MSize_ = {},
- uint32_t NSize_ = {},
- uint32_t KSize_ = {},
- VULKAN_HPP_NAMESPACE::ComponentTypeNV AType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16,
- VULKAN_HPP_NAMESPACE::ComponentTypeNV BType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16,
- VULKAN_HPP_NAMESPACE::ComponentTypeNV CType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16,
- VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16,
- VULKAN_HPP_NAMESPACE::ScopeNV scope_ = VULKAN_HPP_NAMESPACE::ScopeNV::eDevice ) VULKAN_HPP_NOEXCEPT
- : MSize( MSize_ )
- , NSize( NSize_ )
- , KSize( KSize_ )
- , AType( AType_ )
- , BType( BType_ )
- , CType( CType_ )
- , DType( DType_ )
- , scope( scope_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCooperativeMatrixPropertiesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV(uint32_t MSize_ = {}, uint32_t NSize_ = {}, uint32_t KSize_ = {}, VULKAN_HPP_NAMESPACE::ComponentTypeNV AType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, VULKAN_HPP_NAMESPACE::ComponentTypeNV BType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, VULKAN_HPP_NAMESPACE::ComponentTypeNV CType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, VULKAN_HPP_NAMESPACE::ScopeNV scope_ = VULKAN_HPP_NAMESPACE::ScopeNV::eDevice) VULKAN_HPP_NOEXCEPT
+ : MSize( MSize_ ), NSize( NSize_ ), KSize( KSize_ ), AType( AType_ ), BType( BType_ ), CType( CType_ ), DType( DType_ ), scope( scope_ )
{}
- VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV & operator=( VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV ) - offsetof( CooperativeMatrixPropertiesNV, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CooperativeMatrixPropertiesNV( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- CooperativeMatrixPropertiesNV& operator=( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ CooperativeMatrixPropertiesNV & operator=( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV const *>( &rhs );
+ return *this;
+ }
+
+ CooperativeMatrixPropertiesNV & operator=( CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( CooperativeMatrixPropertiesNV ) );
return *this;
}
@@ -25982,6 +25638,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkCooperativeMatrixPropertiesNV const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCooperativeMatrixPropertiesNV*>( this );
@@ -25992,6 +25649,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkCooperativeMatrixPropertiesNV*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( CooperativeMatrixPropertiesNV const& ) const = default;
+#else
bool operator==( CooperativeMatrixPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -26010,6 +25671,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCooperativeMatrixPropertiesNV;
@@ -26022,42 +25686,596 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ComponentTypeNV CType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16;
VULKAN_HPP_NAMESPACE::ComponentTypeNV DType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16;
VULKAN_HPP_NAMESPACE::ScopeNV scope = VULKAN_HPP_NAMESPACE::ScopeNV::eDevice;
+
};
static_assert( sizeof( CooperativeMatrixPropertiesNV ) == sizeof( VkCooperativeMatrixPropertiesNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<CooperativeMatrixPropertiesNV>::value, "struct wrapper is not a standard layout!" );
- struct CopyDescriptorSet
+ template <>
+ struct CppType<StructureType, StructureType::eCooperativeMatrixPropertiesNV>
+ {
+ using Type = CooperativeMatrixPropertiesNV;
+ };
+
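  // Illustrative sketch (requires the VK_NV_cooperative_matrix extension and a dispatcher
  // with its entry points loaded): enumerate the matrix shapes and component types a
  // physical device supports, returned as the struct defined above.
  std::vector<vk::CooperativeMatrixPropertiesNV> cooperativeMatrixProperties =
      physicalDevice.getCooperativeMatrixPropertiesNV();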
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ struct CopyAccelerationStructureInfoKHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyAccelerationStructureInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR CopyAccelerationStructureInfoKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ = {}, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone) VULKAN_HPP_NOEXCEPT
+ : src( src_ ), dst( dst_ ), mode( mode_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR CopyAccelerationStructureInfoKHR( CopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ CopyAccelerationStructureInfoKHR( VkCopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ CopyAccelerationStructureInfoKHR & operator=( VkCopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR const *>( &rhs );
+ return *this;
+ }
+
+ CopyAccelerationStructureInfoKHR & operator=( CopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( CopyAccelerationStructureInfoKHR ) );
+ return *this;
+ }
+
+ CopyAccelerationStructureInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ CopyAccelerationStructureInfoKHR & setSrc( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ ) VULKAN_HPP_NOEXCEPT
+ {
+ src = src_;
+ return *this;
+ }
+
+ CopyAccelerationStructureInfoKHR & setDst( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dst = dst_;
+ return *this;
+ }
+
+ CopyAccelerationStructureInfoKHR & setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT
+ {
+ mode = mode_;
+ return *this;
+ }
+
+
+ operator VkCopyAccelerationStructureInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkCopyAccelerationStructureInfoKHR*>( this );
+ }
+
+ operator VkCopyAccelerationStructureInfoKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkCopyAccelerationStructureInfoKHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( CopyAccelerationStructureInfoKHR const& ) const = default;
+#else
+ bool operator==( CopyAccelerationStructureInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( src == rhs.src )
+ && ( dst == rhs.dst )
+ && ( mode == rhs.mode );
+ }
+
+ bool operator!=( CopyAccelerationStructureInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyAccelerationStructureInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src = {};
+ VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst = {};
+ VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone;
+
+ };
+ static_assert( sizeof( CopyAccelerationStructureInfoKHR ) == sizeof( VkCopyAccelerationStructureInfoKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<CopyAccelerationStructureInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eCopyAccelerationStructureInfoKHR>
{
- VULKAN_HPP_CONSTEXPR CopyDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ = {},
- uint32_t srcBinding_ = {},
- uint32_t srcArrayElement_ = {},
- VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {},
- uint32_t dstBinding_ = {},
- uint32_t dstArrayElement_ = {},
- uint32_t descriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT
- : srcSet( srcSet_ )
- , srcBinding( srcBinding_ )
- , srcArrayElement( srcArrayElement_ )
- , dstSet( dstSet_ )
- , dstBinding( dstBinding_ )
- , dstArrayElement( dstArrayElement_ )
- , descriptorCount( descriptorCount_ )
+ using Type = CopyAccelerationStructureInfoKHR;
+ };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
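The new copy-info wrappers follow the usual vulkan.hpp pattern: chained setters that leave sType fixed, plus an implicit conversion back to the underlying C struct. A minimal usage sketch for CopyAccelerationStructureInfoKHR, assuming VK_ENABLE_BETA_EXTENSIONS is defined and both handles are valid acceleration structures on the same device:

#include <vulkan/vulkan.hpp>

#ifdef VK_ENABLE_BETA_EXTENSIONS
// Fills a copy-info struct for a compacting copy; a sketch assuming src and dst
// were created on the same device and src has been built with compaction allowed.
VkCopyAccelerationStructureInfoKHR makeCompactCopyInfo( vk::AccelerationStructureKHR src,
                                                        vk::AccelerationStructureKHR dst )
{
  vk::CopyAccelerationStructureInfoKHR info;
  info.setSrc( src )
      .setDst( dst )
      .setMode( vk::CopyAccelerationStructureModeKHR::eCompact );

  // The wrapper converts implicitly to the C struct, so the result can be handed
  // to a PFN_vkCmdCopyAccelerationStructureKHR fetched via vkGetDeviceProcAddr.
  return info;
}
#endif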
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ struct CopyAccelerationStructureToMemoryInfoKHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyAccelerationStructureToMemoryInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ CopyAccelerationStructureToMemoryInfoKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst_ = {}, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone) VULKAN_HPP_NOEXCEPT
+ : src( src_ ), dst( dst_ ), mode( mode_ )
{}
- VULKAN_HPP_NAMESPACE::CopyDescriptorSet & operator=( VULKAN_HPP_NAMESPACE::CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
+ CopyAccelerationStructureToMemoryInfoKHR( CopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ CopyAccelerationStructureToMemoryInfoKHR( VkCopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::CopyDescriptorSet ) - offsetof( CopyDescriptorSet, pNext ) );
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ CopyAccelerationStructureToMemoryInfoKHR & operator=( VkCopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR const *>( &rhs );
return *this;
}
+ CopyAccelerationStructureToMemoryInfoKHR & operator=( CopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( CopyAccelerationStructureToMemoryInfoKHR ) );
+ return *this;
+ }
+
+ CopyAccelerationStructureToMemoryInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ CopyAccelerationStructureToMemoryInfoKHR & setSrc( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ ) VULKAN_HPP_NOEXCEPT
+ {
+ src = src_;
+ return *this;
+ }
+
+ CopyAccelerationStructureToMemoryInfoKHR & setDst( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & dst_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dst = dst_;
+ return *this;
+ }
+
+ CopyAccelerationStructureToMemoryInfoKHR & setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT
+ {
+ mode = mode_;
+ return *this;
+ }
+
+
+ operator VkCopyAccelerationStructureToMemoryInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR*>( this );
+ }
+
+ operator VkCopyAccelerationStructureToMemoryInfoKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkCopyAccelerationStructureToMemoryInfoKHR*>( this );
+ }
+
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyAccelerationStructureToMemoryInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src = {};
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst = {};
+ VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone;
+
+ };
+ static_assert( sizeof( CopyAccelerationStructureToMemoryInfoKHR ) == sizeof( VkCopyAccelerationStructureToMemoryInfoKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<CopyAccelerationStructureToMemoryInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eCopyAccelerationStructureToMemoryInfoKHR>
+ {
+ using Type = CopyAccelerationStructureToMemoryInfoKHR;
+ };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+ struct CopyBufferInfo2KHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyBufferInfo2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR CopyBufferInfo2KHR(VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ = {}, VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ = {}, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::BufferCopy2KHR* pRegions_ = {}) VULKAN_HPP_NOEXCEPT
+ : srcBuffer( srcBuffer_ ), dstBuffer( dstBuffer_ ), regionCount( regionCount_ ), pRegions( pRegions_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR CopyBufferInfo2KHR( CopyBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ CopyBufferInfo2KHR( VkCopyBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ CopyBufferInfo2KHR( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_, VULKAN_HPP_NAMESPACE::Buffer dstBuffer_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferCopy2KHR> const & regions_ )
+ : srcBuffer( srcBuffer_ ), dstBuffer( dstBuffer_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ CopyBufferInfo2KHR & operator=( VkCopyBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyBufferInfo2KHR const *>( &rhs );
+ return *this;
+ }
+
+ CopyBufferInfo2KHR & operator=( CopyBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( CopyBufferInfo2KHR ) );
+ return *this;
+ }
+
+ CopyBufferInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ CopyBufferInfo2KHR & setSrcBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcBuffer = srcBuffer_;
+ return *this;
+ }
+
+ CopyBufferInfo2KHR & setDstBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstBuffer = dstBuffer_;
+ return *this;
+ }
+
+ CopyBufferInfo2KHR & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ regionCount = regionCount_;
+ return *this;
+ }
+
+ CopyBufferInfo2KHR & setPRegions( const VULKAN_HPP_NAMESPACE::BufferCopy2KHR* pRegions_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pRegions = pRegions_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ CopyBufferInfo2KHR & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferCopy2KHR> const & regions_ ) VULKAN_HPP_NOEXCEPT
+ {
+ regionCount = static_cast<uint32_t>( regions_.size() );
+ pRegions = regions_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+ operator VkCopyBufferInfo2KHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkCopyBufferInfo2KHR*>( this );
+ }
+
+ operator VkCopyBufferInfo2KHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkCopyBufferInfo2KHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( CopyBufferInfo2KHR const& ) const = default;
+#else
+ bool operator==( CopyBufferInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( srcBuffer == rhs.srcBuffer )
+ && ( dstBuffer == rhs.dstBuffer )
+ && ( regionCount == rhs.regionCount )
+ && ( pRegions == rhs.pRegions );
+ }
+
+ bool operator!=( CopyBufferInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyBufferInfo2KHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Buffer srcBuffer = {};
+ VULKAN_HPP_NAMESPACE::Buffer dstBuffer = {};
+ uint32_t regionCount = {};
+ const VULKAN_HPP_NAMESPACE::BufferCopy2KHR* pRegions = {};
+
+ };
+ static_assert( sizeof( CopyBufferInfo2KHR ) == sizeof( VkCopyBufferInfo2KHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<CopyBufferInfo2KHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eCopyBufferInfo2KHR>
+ {
+ using Type = CopyBufferInfo2KHR;
+ };
+
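The enhanced-mode constructor added above takes an ArrayProxyNoTemporaries of regions and fills regionCount and pRegions in one step, so the two cannot drift apart. A minimal sketch, assuming enhanced mode is on (the default), VK_KHR_copy_commands2 is enabled on the device, cmd is recording, and the dynamic dispatcher passed in has vkCmdCopyBuffer2KHR loaded:

#include <vulkan/vulkan.hpp>
#include <array>

// Records a two-region buffer copy; a sketch, not taken from this diff.
void recordBufferCopy( vk::CommandBuffer cmd, vk::Buffer src, vk::Buffer dst,
                       vk::DispatchLoaderDynamic const & dld )
{
  std::array<vk::BufferCopy2KHR, 2> regions = {
    vk::BufferCopy2KHR().setSrcOffset( 0 ).setDstOffset( 0 ).setSize( 256 ),
    vk::BufferCopy2KHR().setSrcOffset( 256 ).setDstOffset( 512 ).setSize( 128 )
  };

  // The ArrayProxyNoTemporaries constructor derives regionCount and pRegions
  // from the named array, so the struct never points at a dead temporary.
  vk::CopyBufferInfo2KHR copyInfo( src, dst, regions );

  cmd.copyBuffer2KHR( copyInfo, dld );
}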
+ struct CopyBufferToImageInfo2KHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyBufferToImageInfo2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR CopyBufferToImageInfo2KHR(VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ = {}, VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR* pRegions_ = {}) VULKAN_HPP_NOEXCEPT
+ : srcBuffer( srcBuffer_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( regionCount_ ), pRegions( pRegions_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR CopyBufferToImageInfo2KHR( CopyBufferToImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ CopyBufferToImageInfo2KHR( VkCopyBufferToImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ CopyBufferToImageInfo2KHR( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_, VULKAN_HPP_NAMESPACE::Image dstImage_, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR> const & regions_ )
+ : srcBuffer( srcBuffer_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ CopyBufferToImageInfo2KHR & operator=( VkCopyBufferToImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2KHR const *>( &rhs );
+ return *this;
+ }
+
+ CopyBufferToImageInfo2KHR & operator=( CopyBufferToImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( CopyBufferToImageInfo2KHR ) );
+ return *this;
+ }
+
+ CopyBufferToImageInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ CopyBufferToImageInfo2KHR & setSrcBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcBuffer = srcBuffer_;
+ return *this;
+ }
+
+ CopyBufferToImageInfo2KHR & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstImage = dstImage_;
+ return *this;
+ }
+
+ CopyBufferToImageInfo2KHR & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstImageLayout = dstImageLayout_;
+ return *this;
+ }
+
+ CopyBufferToImageInfo2KHR & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ regionCount = regionCount_;
+ return *this;
+ }
+
+ CopyBufferToImageInfo2KHR & setPRegions( const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR* pRegions_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pRegions = pRegions_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ CopyBufferToImageInfo2KHR & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR> const & regions_ ) VULKAN_HPP_NOEXCEPT
+ {
+ regionCount = static_cast<uint32_t>( regions_.size() );
+ pRegions = regions_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+ operator VkCopyBufferToImageInfo2KHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkCopyBufferToImageInfo2KHR*>( this );
+ }
+
+ operator VkCopyBufferToImageInfo2KHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkCopyBufferToImageInfo2KHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( CopyBufferToImageInfo2KHR const& ) const = default;
+#else
+ bool operator==( CopyBufferToImageInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( srcBuffer == rhs.srcBuffer )
+ && ( dstImage == rhs.dstImage )
+ && ( dstImageLayout == rhs.dstImageLayout )
+ && ( regionCount == rhs.regionCount )
+ && ( pRegions == rhs.pRegions );
+ }
+
+ bool operator!=( CopyBufferToImageInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyBufferToImageInfo2KHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Buffer srcBuffer = {};
+ VULKAN_HPP_NAMESPACE::Image dstImage = {};
+ VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+ uint32_t regionCount = {};
+ const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR* pRegions = {};
+
+ };
+ static_assert( sizeof( CopyBufferToImageInfo2KHR ) == sizeof( VkCopyBufferToImageInfo2KHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<CopyBufferToImageInfo2KHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eCopyBufferToImageInfo2KHR>
+ {
+ using Type = CopyBufferToImageInfo2KHR;
+ };
+
+ class DescriptorSet
+ {
+ public:
+ using CType = VkDescriptorSet;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSet;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSet;
+
+ public:
+ VULKAN_HPP_CONSTEXPR DescriptorSet() VULKAN_HPP_NOEXCEPT
+ : m_descriptorSet(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR DescriptorSet( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_descriptorSet(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSet( VkDescriptorSet descriptorSet ) VULKAN_HPP_NOEXCEPT
+ : m_descriptorSet( descriptorSet )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ DescriptorSet & operator=(VkDescriptorSet descriptorSet) VULKAN_HPP_NOEXCEPT
+ {
+ m_descriptorSet = descriptorSet;
+ return *this;
+ }
+#endif
+
+ DescriptorSet & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ {
+ m_descriptorSet = VK_NULL_HANDLE;
+ return *this;
+ }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DescriptorSet const& ) const = default;
+#else
+ bool operator==( DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_descriptorSet == rhs.m_descriptorSet;
+ }
+
+ bool operator!=(DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_descriptorSet != rhs.m_descriptorSet;
+ }
+
+ bool operator<(DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_descriptorSet < rhs.m_descriptorSet;
+ }
+#endif
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSet() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_descriptorSet;
+ }
+
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_descriptorSet != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_descriptorSet == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkDescriptorSet m_descriptorSet;
+ };
+ static_assert( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSet ) == sizeof( VkDescriptorSet ), "handle and wrapper have different size!" );
+
+ template <>
+ struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eDescriptorSet>
+ {
+ using type = VULKAN_HPP_NAMESPACE::DescriptorSet;
+ };
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSet>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::DescriptorSet;
+ };
+
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSet>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::DescriptorSet;
+ };
+
+
+ template <>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DescriptorSet>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+ };
+
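The DescriptorSet handle wrapper above is a same-size stand-in for VkDescriptorSet that adds null-handle construction, comparisons, and an explicit bool test. A small sketch of those semantics, with no device involved:

#include <vulkan/vulkan.hpp>
#include <cassert>

void descriptorSetHandleBasics( VkDescriptorSet rawSet )
{
  vk::DescriptorSet empty;                 // default-constructed to VK_NULL_HANDLE
  assert( !empty );                        // operator! / explicit operator bool

  vk::DescriptorSet wrapped( rawSet );     // wrap a raw handle coming from the C API
  if ( wrapped )
  {
    // Wrapper and raw handle have identical size, so the handle round-trips
    // through the explicit conversion operator for use with C entry points.
    VkDescriptorSet roundTripped = static_cast<VkDescriptorSet>( wrapped );
    assert( roundTripped == rawSet );
  }

  wrapped = nullptr;                       // reset to the null handle
  assert( wrapped == empty );
}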
+ struct CopyDescriptorSet
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyDescriptorSet;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR CopyDescriptorSet(VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ = {}, uint32_t srcBinding_ = {}, uint32_t srcArrayElement_ = {}, VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {}, uint32_t dstBinding_ = {}, uint32_t dstArrayElement_ = {}, uint32_t descriptorCount_ = {}) VULKAN_HPP_NOEXCEPT
+ : srcSet( srcSet_ ), srcBinding( srcBinding_ ), srcArrayElement( srcArrayElement_ ), dstSet( dstSet_ ), dstBinding( dstBinding_ ), dstArrayElement( dstArrayElement_ ), descriptorCount( descriptorCount_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR CopyDescriptorSet( CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
CopyDescriptorSet( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ CopyDescriptorSet & operator=( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyDescriptorSet const *>( &rhs );
+ return *this;
+ }
- CopyDescriptorSet& operator=( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
+ CopyDescriptorSet & operator=( CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyDescriptorSet const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( CopyDescriptorSet ) );
return *this;
}
@@ -26109,6 +26327,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkCopyDescriptorSet const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCopyDescriptorSet*>( this );
@@ -26119,6 +26338,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkCopyDescriptorSet*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( CopyDescriptorSet const& ) const = default;
+#else
bool operator==( CopyDescriptorSet const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -26136,6 +26359,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyDescriptorSet;
@@ -26147,38 +26373,533 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t dstBinding = {};
uint32_t dstArrayElement = {};
uint32_t descriptorCount = {};
+
};
static_assert( sizeof( CopyDescriptorSet ) == sizeof( VkCopyDescriptorSet ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<CopyDescriptorSet>::value, "struct wrapper is not a standard layout!" );
-#ifdef VK_USE_PLATFORM_WIN32_KHR
+ template <>
+ struct CppType<StructureType, StructureType::eCopyDescriptorSet>
+ {
+ using Type = CopyDescriptorSet;
+ };
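CopyDescriptorSet feeds Device::updateDescriptorSets, whose enhanced-mode overload takes ArrayProxy ranges for both writes and copies. A minimal sketch using the constructor shown above, assuming device is a valid vk::Device and the two sets come from layouts whose binding 0 is compatible:

#include <vulkan/vulkan.hpp>

// Copies one descriptor from binding 0 of srcSet into binding 0 of dstSet.
void mirrorBinding0( vk::Device device, vk::DescriptorSet srcSet, vk::DescriptorSet dstSet )
{
  vk::CopyDescriptorSet copy( srcSet, /*srcBinding*/ 0, /*srcArrayElement*/ 0,
                              dstSet, /*dstBinding*/ 0, /*dstArrayElement*/ 0,
                              /*descriptorCount*/ 1 );

  // No writes, one copy; the ArrayProxy overload takes the counts from the arguments.
  device.updateDescriptorSets( nullptr, copy );
}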
- struct D3D12FenceSubmitInfoKHR
+ struct ImageCopy2KHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCopy2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ImageCopy2KHR(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}) VULKAN_HPP_NOEXCEPT
+ : srcSubresource( srcSubresource_ ), srcOffset( srcOffset_ ), dstSubresource( dstSubresource_ ), dstOffset( dstOffset_ ), extent( extent_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR ImageCopy2KHR( ImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImageCopy2KHR( VkImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ ImageCopy2KHR & operator=( VkImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageCopy2KHR const *>( &rhs );
+ return *this;
+ }
+
+ ImageCopy2KHR & operator=( ImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageCopy2KHR ) );
+ return *this;
+ }
+
+ ImageCopy2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ ImageCopy2KHR & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcSubresource = srcSubresource_;
+ return *this;
+ }
+
+ ImageCopy2KHR & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcOffset = srcOffset_;
+ return *this;
+ }
+
+ ImageCopy2KHR & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstSubresource = dstSubresource_;
+ return *this;
+ }
+
+ ImageCopy2KHR & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstOffset = dstOffset_;
+ return *this;
+ }
+
+ ImageCopy2KHR & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extent = extent_;
+ return *this;
+ }
+
+
+ operator VkImageCopy2KHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkImageCopy2KHR*>( this );
+ }
+
+ operator VkImageCopy2KHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkImageCopy2KHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ImageCopy2KHR const& ) const = default;
+#else
+ bool operator==( ImageCopy2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( srcSubresource == rhs.srcSubresource )
+ && ( srcOffset == rhs.srcOffset )
+ && ( dstSubresource == rhs.dstSubresource )
+ && ( dstOffset == rhs.dstOffset )
+ && ( extent == rhs.extent );
+ }
+
+ bool operator!=( ImageCopy2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCopy2KHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
+ VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {};
+ VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
+ VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {};
+ VULKAN_HPP_NAMESPACE::Extent3D extent = {};
+
+ };
+ static_assert( sizeof( ImageCopy2KHR ) == sizeof( VkImageCopy2KHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ImageCopy2KHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eImageCopy2KHR>
+ {
+ using Type = ImageCopy2KHR;
+ };
+
+ struct CopyImageInfo2KHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageInfo2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR CopyImageInfo2KHR(VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageCopy2KHR* pRegions_ = {}) VULKAN_HPP_NOEXCEPT
+ : srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( regionCount_ ), pRegions( pRegions_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR CopyImageInfo2KHR( CopyImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ CopyImageInfo2KHR( VkCopyImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ CopyImageInfo2KHR( VULKAN_HPP_NAMESPACE::Image srcImage_, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, VULKAN_HPP_NAMESPACE::Image dstImage_, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageCopy2KHR> const & regions_ )
+ : srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ CopyImageInfo2KHR & operator=( VkCopyImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyImageInfo2KHR const *>( &rhs );
+ return *this;
+ }
+
+ CopyImageInfo2KHR & operator=( CopyImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( CopyImageInfo2KHR ) );
+ return *this;
+ }
+
+ CopyImageInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ CopyImageInfo2KHR & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcImage = srcImage_;
+ return *this;
+ }
+
+ CopyImageInfo2KHR & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcImageLayout = srcImageLayout_;
+ return *this;
+ }
+
+ CopyImageInfo2KHR & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstImage = dstImage_;
+ return *this;
+ }
+
+ CopyImageInfo2KHR & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstImageLayout = dstImageLayout_;
+ return *this;
+ }
+
+ CopyImageInfo2KHR & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ regionCount = regionCount_;
+ return *this;
+ }
+
+ CopyImageInfo2KHR & setPRegions( const VULKAN_HPP_NAMESPACE::ImageCopy2KHR* pRegions_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pRegions = pRegions_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ CopyImageInfo2KHR & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageCopy2KHR> const & regions_ ) VULKAN_HPP_NOEXCEPT
+ {
+ regionCount = static_cast<uint32_t>( regions_.size() );
+ pRegions = regions_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+ operator VkCopyImageInfo2KHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkCopyImageInfo2KHR*>( this );
+ }
+
+ operator VkCopyImageInfo2KHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkCopyImageInfo2KHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( CopyImageInfo2KHR const& ) const = default;
+#else
+ bool operator==( CopyImageInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( srcImage == rhs.srcImage )
+ && ( srcImageLayout == rhs.srcImageLayout )
+ && ( dstImage == rhs.dstImage )
+ && ( dstImageLayout == rhs.dstImageLayout )
+ && ( regionCount == rhs.regionCount )
+ && ( pRegions == rhs.pRegions );
+ }
+
+ bool operator!=( CopyImageInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyImageInfo2KHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Image srcImage = {};
+ VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+ VULKAN_HPP_NAMESPACE::Image dstImage = {};
+ VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+ uint32_t regionCount = {};
+ const VULKAN_HPP_NAMESPACE::ImageCopy2KHR* pRegions = {};
+
+ };
+ static_assert( sizeof( CopyImageInfo2KHR ) == sizeof( VkCopyImageInfo2KHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<CopyImageInfo2KHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eCopyImageInfo2KHR>
{
- VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR( uint32_t waitSemaphoreValuesCount_ = {},
- const uint64_t* pWaitSemaphoreValues_ = {},
- uint32_t signalSemaphoreValuesCount_ = {},
- const uint64_t* pSignalSemaphoreValues_ = {} ) VULKAN_HPP_NOEXCEPT
- : waitSemaphoreValuesCount( waitSemaphoreValuesCount_ )
- , pWaitSemaphoreValues( pWaitSemaphoreValues_ )
- , signalSemaphoreValuesCount( signalSemaphoreValuesCount_ )
- , pSignalSemaphoreValues( pSignalSemaphoreValues_ )
+ using Type = CopyImageInfo2KHR;
+ };
+
+ struct CopyImageToBufferInfo2KHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageToBufferInfo2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR CopyImageToBufferInfo2KHR(VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ = {}, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR* pRegions_ = {}) VULKAN_HPP_NOEXCEPT
+ : srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstBuffer( dstBuffer_ ), regionCount( regionCount_ ), pRegions( pRegions_ )
{}
- VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR & operator=( VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR CopyImageToBufferInfo2KHR( CopyImageToBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ CopyImageToBufferInfo2KHR( VkCopyImageToBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ CopyImageToBufferInfo2KHR( VULKAN_HPP_NAMESPACE::Image srcImage_, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, VULKAN_HPP_NAMESPACE::Buffer dstBuffer_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR> const & regions_ )
+ : srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstBuffer( dstBuffer_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ CopyImageToBufferInfo2KHR & operator=( VkCopyImageToBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2KHR const *>( &rhs );
+ return *this;
+ }
+
+ CopyImageToBufferInfo2KHR & operator=( CopyImageToBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( CopyImageToBufferInfo2KHR ) );
+ return *this;
+ }
+
+ CopyImageToBufferInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ CopyImageToBufferInfo2KHR & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcImage = srcImage_;
+ return *this;
+ }
+
+ CopyImageToBufferInfo2KHR & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcImageLayout = srcImageLayout_;
+ return *this;
+ }
+
+ CopyImageToBufferInfo2KHR & setDstBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstBuffer = dstBuffer_;
+ return *this;
+ }
+
+ CopyImageToBufferInfo2KHR & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ regionCount = regionCount_;
+ return *this;
+ }
+
+ CopyImageToBufferInfo2KHR & setPRegions( const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR* pRegions_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pRegions = pRegions_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ CopyImageToBufferInfo2KHR & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR> const & regions_ ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR ) - offsetof( D3D12FenceSubmitInfoKHR, pNext ) );
+ regionCount = static_cast<uint32_t>( regions_.size() );
+ pRegions = regions_.data();
return *this;
}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+ operator VkCopyImageToBufferInfo2KHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkCopyImageToBufferInfo2KHR*>( this );
+ }
+
+ operator VkCopyImageToBufferInfo2KHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkCopyImageToBufferInfo2KHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( CopyImageToBufferInfo2KHR const& ) const = default;
+#else
+ bool operator==( CopyImageToBufferInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( srcImage == rhs.srcImage )
+ && ( srcImageLayout == rhs.srcImageLayout )
+ && ( dstBuffer == rhs.dstBuffer )
+ && ( regionCount == rhs.regionCount )
+ && ( pRegions == rhs.pRegions );
+ }
+
+ bool operator!=( CopyImageToBufferInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyImageToBufferInfo2KHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Image srcImage = {};
+ VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+ VULKAN_HPP_NAMESPACE::Buffer dstBuffer = {};
+ uint32_t regionCount = {};
+ const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR* pRegions = {};
+
+ };
+ static_assert( sizeof( CopyImageToBufferInfo2KHR ) == sizeof( VkCopyImageToBufferInfo2KHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<CopyImageToBufferInfo2KHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eCopyImageToBufferInfo2KHR>
+ {
+ using Type = CopyImageToBufferInfo2KHR;
+ };
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ struct CopyMemoryToAccelerationStructureInfoKHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMemoryToAccelerationStructureInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ CopyMemoryToAccelerationStructureInfoKHR(VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ = {}, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone) VULKAN_HPP_NOEXCEPT
+ : src( src_ ), dst( dst_ ), mode( mode_ )
+ {}
+
+ CopyMemoryToAccelerationStructureInfoKHR( CopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ CopyMemoryToAccelerationStructureInfoKHR( VkCopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ CopyMemoryToAccelerationStructureInfoKHR & operator=( VkCopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR const *>( &rhs );
+ return *this;
+ }
+
+ CopyMemoryToAccelerationStructureInfoKHR & operator=( CopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( CopyMemoryToAccelerationStructureInfoKHR ) );
+ return *this;
+ }
+
+ CopyMemoryToAccelerationStructureInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ CopyMemoryToAccelerationStructureInfoKHR & setSrc( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & src_ ) VULKAN_HPP_NOEXCEPT
+ {
+ src = src_;
+ return *this;
+ }
+
+ CopyMemoryToAccelerationStructureInfoKHR & setDst( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dst = dst_;
+ return *this;
+ }
+
+ CopyMemoryToAccelerationStructureInfoKHR & setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT
+ {
+ mode = mode_;
+ return *this;
+ }
+
+
+ operator VkCopyMemoryToAccelerationStructureInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR*>( this );
+ }
+
+ operator VkCopyMemoryToAccelerationStructureInfoKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkCopyMemoryToAccelerationStructureInfoKHR*>( this );
+ }
+
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyMemoryToAccelerationStructureInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src = {};
+ VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst = {};
+ VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone;
+
+ };
+ static_assert( sizeof( CopyMemoryToAccelerationStructureInfoKHR ) == sizeof( VkCopyMemoryToAccelerationStructureInfoKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<CopyMemoryToAccelerationStructureInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eCopyMemoryToAccelerationStructureInfoKHR>
+ {
+ using Type = CopyMemoryToAccelerationStructureInfoKHR;
+ };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ struct D3D12FenceSubmitInfoKHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eD3D12FenceSubmitInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR(uint32_t waitSemaphoreValuesCount_ = {}, const uint64_t* pWaitSemaphoreValues_ = {}, uint32_t signalSemaphoreValuesCount_ = {}, const uint64_t* pSignalSemaphoreValues_ = {}) VULKAN_HPP_NOEXCEPT
+ : waitSemaphoreValuesCount( waitSemaphoreValuesCount_ ), pWaitSemaphoreValues( pWaitSemaphoreValues_ ), signalSemaphoreValuesCount( signalSemaphoreValuesCount_ ), pSignalSemaphoreValues( pSignalSemaphoreValues_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR( D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
D3D12FenceSubmitInfoKHR( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- D3D12FenceSubmitInfoKHR& operator=( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ D3D12FenceSubmitInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ = {} )
+ : waitSemaphoreValuesCount( static_cast<uint32_t>( waitSemaphoreValues_.size() ) ), pWaitSemaphoreValues( waitSemaphoreValues_.data() ), signalSemaphoreValuesCount( static_cast<uint32_t>( signalSemaphoreValues_.size() ) ), pSignalSemaphoreValues( signalSemaphoreValues_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ D3D12FenceSubmitInfoKHR & operator=( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR const *>( &rhs );
+ return *this;
+ }
+
+ D3D12FenceSubmitInfoKHR & operator=( D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( D3D12FenceSubmitInfoKHR ) );
return *this;
}
@@ -26200,6 +26921,15 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ D3D12FenceSubmitInfoKHR & setWaitSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
+ {
+ waitSemaphoreValuesCount = static_cast<uint32_t>( waitSemaphoreValues_.size() );
+ pWaitSemaphoreValues = waitSemaphoreValues_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
D3D12FenceSubmitInfoKHR & setSignalSemaphoreValuesCount( uint32_t signalSemaphoreValuesCount_ ) VULKAN_HPP_NOEXCEPT
{
signalSemaphoreValuesCount = signalSemaphoreValuesCount_;
@@ -26212,6 +26942,16 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ D3D12FenceSubmitInfoKHR & setSignalSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
+ {
+ signalSemaphoreValuesCount = static_cast<uint32_t>( signalSemaphoreValues_.size() );
+ pSignalSemaphoreValues = signalSemaphoreValues_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
operator VkD3D12FenceSubmitInfoKHR const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkD3D12FenceSubmitInfoKHR*>( this );
@@ -26222,6 +26962,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkD3D12FenceSubmitInfoKHR*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( D3D12FenceSubmitInfoKHR const& ) const = default;
+#else
bool operator==( D3D12FenceSubmitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -26236,6 +26980,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eD3D12FenceSubmitInfoKHR;
@@ -26244,35 +26991,45 @@ namespace VULKAN_HPP_NAMESPACE
const uint64_t* pWaitSemaphoreValues = {};
uint32_t signalSemaphoreValuesCount = {};
const uint64_t* pSignalSemaphoreValues = {};
+
};
static_assert( sizeof( D3D12FenceSubmitInfoKHR ) == sizeof( VkD3D12FenceSubmitInfoKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<D3D12FenceSubmitInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eD3D12FenceSubmitInfoKHR>
+ {
+ using Type = D3D12FenceSubmitInfoKHR;
+ };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
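The enhanced-mode setters added for D3D12FenceSubmitInfoKHR fill each count and pointer pair from a single range argument. A sketch, only meaningful when VK_USE_PLATFORM_WIN32_KHR is defined and the result is chained onto the pNext of a vk::SubmitInfo used with an imported D3D12 fence; the caller must keep the value arrays alive until submission:

#include <vulkan/vulkan.hpp>
#include <array>

#ifdef VK_USE_PLATFORM_WIN32_KHR
vk::D3D12FenceSubmitInfoKHR makeFenceValues( std::array<uint64_t, 2> const & waitValues,
                                             std::array<uint64_t, 2> const & signalValues )
{
  vk::D3D12FenceSubmitInfoKHR info;
  info.setWaitSemaphoreValues( waitValues );     // sets count and pointer together
  info.setSignalSemaphoreValues( signalValues );
  return info;
}
#endif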
struct DebugMarkerMarkerInfoEXT
{
- VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT( const char* pMarkerName_ = {},
- std::array<float,4> const& color_ = {} ) VULKAN_HPP_NOEXCEPT
- : pMarkerName( pMarkerName_ )
- , color{}
- {
- VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<float,4,4>::copy( color, color_ );
- }
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerMarkerInfoEXT;
- VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & operator=( VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT ) - offsetof( DebugMarkerMarkerInfoEXT, pNext ) );
- return *this;
- }
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT(const char* pMarkerName_ = {}, std::array<float,4> const& color_ = {}) VULKAN_HPP_NOEXCEPT
+ : pMarkerName( pMarkerName_ ), color( color_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT( DebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DebugMarkerMarkerInfoEXT( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ DebugMarkerMarkerInfoEXT & operator=( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT const *>( &rhs );
+ return *this;
+ }
- DebugMarkerMarkerInfoEXT& operator=( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ DebugMarkerMarkerInfoEXT & operator=( DebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DebugMarkerMarkerInfoEXT ) );
return *this;
}
@@ -26290,10 +27047,11 @@ namespace VULKAN_HPP_NAMESPACE
DebugMarkerMarkerInfoEXT & setColor( std::array<float,4> color_ ) VULKAN_HPP_NOEXCEPT
{
- memcpy( color, color_.data(), 4 * sizeof( float ) );
+ color = color_;
return *this;
}
+
operator VkDebugMarkerMarkerInfoEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDebugMarkerMarkerInfoEXT*>( this );
@@ -26304,52 +27062,69 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkDebugMarkerMarkerInfoEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DebugMarkerMarkerInfoEXT const& ) const = default;
+#else
bool operator==( DebugMarkerMarkerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pMarkerName == rhs.pMarkerName )
- && ( memcmp( color, rhs.color, 4 * sizeof( float ) ) == 0 );
+ && ( color == rhs.color );
}
bool operator!=( DebugMarkerMarkerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerMarkerInfoEXT;
const void* pNext = {};
const char* pMarkerName = {};
- float color[4] = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> color = {};
+
};
static_assert( sizeof( DebugMarkerMarkerInfoEXT ) == sizeof( VkDebugMarkerMarkerInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DebugMarkerMarkerInfoEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eDebugMarkerMarkerInfoEXT>
+ {
+ using Type = DebugMarkerMarkerInfoEXT;
+ };
+
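With this change the color member becomes an ArrayWrapper1D<float, 4> instead of a raw float[4], so it can be assigned from a std::array and compared with ==, while indexed access keeps working. A minimal sketch, assuming VK_EXT_debug_marker is enabled and the dynamic dispatcher passed in has vkCmdDebugMarkerBeginEXT loaded:

#include <vulkan/vulkan.hpp>
#include <array>

// Opens a debug marker region with a red label color; a sketch, not from this diff.
void beginRedMarker( vk::CommandBuffer cmd, vk::DispatchLoaderDynamic const & dld )
{
  std::array<float, 4> red = { 1.0f, 0.0f, 0.0f, 1.0f };

  vk::DebugMarkerMarkerInfoEXT marker( "shadow pass", red );
  // Element access still works through the array wrapper.
  marker.color[3] = 0.5f;

  cmd.debugMarkerBeginEXT( marker, dld );
}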
struct DebugMarkerObjectNameInfoEXT
{
- VULKAN_HPP_CONSTEXPR DebugMarkerObjectNameInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown,
- uint64_t object_ = {},
- const char* pObjectName_ = {} ) VULKAN_HPP_NOEXCEPT
- : objectType( objectType_ )
- , object( object_ )
- , pObjectName( pObjectName_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerObjectNameInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DebugMarkerObjectNameInfoEXT(VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown, uint64_t object_ = {}, const char* pObjectName_ = {}) VULKAN_HPP_NOEXCEPT
+ : objectType( objectType_ ), object( object_ ), pObjectName( pObjectName_ )
{}
- VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT & operator=( VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT ) - offsetof( DebugMarkerObjectNameInfoEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR DebugMarkerObjectNameInfoEXT( DebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DebugMarkerObjectNameInfoEXT( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- DebugMarkerObjectNameInfoEXT& operator=( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ DebugMarkerObjectNameInfoEXT & operator=( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+ DebugMarkerObjectNameInfoEXT & operator=( DebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DebugMarkerObjectNameInfoEXT ) );
return *this;
}
@@ -26377,6 +27152,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkDebugMarkerObjectNameInfoEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT*>( this );
@@ -26387,6 +27163,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkDebugMarkerObjectNameInfoEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DebugMarkerObjectNameInfoEXT const& ) const = default;
+#else
bool operator==( DebugMarkerObjectNameInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -26400,6 +27180,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerObjectNameInfoEXT;
@@ -26407,38 +27190,51 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
uint64_t object = {};
const char* pObjectName = {};
+
};
static_assert( sizeof( DebugMarkerObjectNameInfoEXT ) == sizeof( VkDebugMarkerObjectNameInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DebugMarkerObjectNameInfoEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eDebugMarkerObjectNameInfoEXT>
+ {
+ using Type = DebugMarkerObjectNameInfoEXT;
+ };
+
struct DebugMarkerObjectTagInfoEXT
{
- VULKAN_HPP_CONSTEXPR DebugMarkerObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown,
- uint64_t object_ = {},
- uint64_t tagName_ = {},
- size_t tagSize_ = {},
- const void* pTag_ = {} ) VULKAN_HPP_NOEXCEPT
- : objectType( objectType_ )
- , object( object_ )
- , tagName( tagName_ )
- , tagSize( tagSize_ )
- , pTag( pTag_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerObjectTagInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DebugMarkerObjectTagInfoEXT(VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown, uint64_t object_ = {}, uint64_t tagName_ = {}, size_t tagSize_ = {}, const void* pTag_ = {}) VULKAN_HPP_NOEXCEPT
+ : objectType( objectType_ ), object( object_ ), tagName( tagName_ ), tagSize( tagSize_ ), pTag( pTag_ )
{}
- VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT & operator=( VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT ) - offsetof( DebugMarkerObjectTagInfoEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR DebugMarkerObjectTagInfoEXT( DebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DebugMarkerObjectTagInfoEXT( VkDebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- DebugMarkerObjectTagInfoEXT& operator=( VkDebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ template <typename T>
+ DebugMarkerObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_, uint64_t object_, uint64_t tagName_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & tag_ )
+ : objectType( objectType_ ), object( object_ ), tagName( tagName_ ), tagSize( tag_.size() * sizeof(T) ), pTag( tag_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ DebugMarkerObjectTagInfoEXT & operator=( VkDebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+ DebugMarkerObjectTagInfoEXT & operator=( DebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DebugMarkerObjectTagInfoEXT ) );
return *this;
}
@@ -26478,6 +27274,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ template <typename T>
+ DebugMarkerObjectTagInfoEXT & setTag( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & tag_ ) VULKAN_HPP_NOEXCEPT
+ {
+ tagSize = tag_.size() * sizeof(T);
+ pTag = tag_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
operator VkDebugMarkerObjectTagInfoEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT*>( this );
@@ -26488,6 +27295,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkDebugMarkerObjectTagInfoEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DebugMarkerObjectTagInfoEXT const& ) const = default;
+#else
bool operator==( DebugMarkerObjectTagInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -26503,6 +27314,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerObjectTagInfoEXT;
@@ -26512,34 +27326,44 @@ namespace VULKAN_HPP_NAMESPACE
uint64_t tagName = {};
size_t tagSize = {};
const void* pTag = {};
+
};
static_assert( sizeof( DebugMarkerObjectTagInfoEXT ) == sizeof( VkDebugMarkerObjectTagInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DebugMarkerObjectTagInfoEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eDebugMarkerObjectTagInfoEXT>
+ {
+ using Type = DebugMarkerObjectTagInfoEXT;
+ };
+
struct DebugReportCallbackCreateInfoEXT
{
- VULKAN_HPP_CONSTEXPR DebugReportCallbackCreateInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_ = {},
- PFN_vkDebugReportCallbackEXT pfnCallback_ = {},
- void* pUserData_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , pfnCallback( pfnCallback_ )
- , pUserData( pUserData_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugReportCallbackCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DebugReportCallbackCreateInfoEXT(VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_ = {}, PFN_vkDebugReportCallbackEXT pfnCallback_ = {}, void* pUserData_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), pfnCallback( pfnCallback_ ), pUserData( pUserData_ )
{}
- VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT ) - offsetof( DebugReportCallbackCreateInfoEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR DebugReportCallbackCreateInfoEXT( DebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DebugReportCallbackCreateInfoEXT( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- DebugReportCallbackCreateInfoEXT& operator=( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ DebugReportCallbackCreateInfoEXT & operator=( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+ DebugReportCallbackCreateInfoEXT & operator=( DebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DebugReportCallbackCreateInfoEXT ) );
return *this;
}
@@ -26567,6 +27391,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkDebugReportCallbackCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT*>( this );
@@ -26577,6 +27402,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkDebugReportCallbackCreateInfoEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DebugReportCallbackCreateInfoEXT const& ) const = default;
+#else
bool operator==( DebugReportCallbackCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -26590,6 +27419,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugReportCallbackCreateInfoEXT;
@@ -26597,34 +27429,44 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags = {};
PFN_vkDebugReportCallbackEXT pfnCallback = {};
void* pUserData = {};
+
};
static_assert( sizeof( DebugReportCallbackCreateInfoEXT ) == sizeof( VkDebugReportCallbackCreateInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DebugReportCallbackCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eDebugReportCallbackCreateInfoEXT>
+ {
+ using Type = DebugReportCallbackCreateInfoEXT;
+ };
+
struct DebugUtilsLabelEXT
{
- VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT( const char* pLabelName_ = {},
- std::array<float,4> const& color_ = {} ) VULKAN_HPP_NOEXCEPT
- : pLabelName( pLabelName_ )
- , color{}
- {
- VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<float,4,4>::copy( color, color_ );
- }
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsLabelEXT;
- VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & operator=( VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT ) - offsetof( DebugUtilsLabelEXT, pNext ) );
- return *this;
- }
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT(const char* pLabelName_ = {}, std::array<float,4> const& color_ = {}) VULKAN_HPP_NOEXCEPT
+ : pLabelName( pLabelName_ ), color( color_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT( DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DebugUtilsLabelEXT( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- DebugUtilsLabelEXT& operator=( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ DebugUtilsLabelEXT & operator=( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT const *>( &rhs );
+ return *this;
+ }
+
+ DebugUtilsLabelEXT & operator=( DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DebugUtilsLabelEXT ) );
return *this;
}
@@ -26642,10 +27484,11 @@ namespace VULKAN_HPP_NAMESPACE
DebugUtilsLabelEXT & setColor( std::array<float,4> color_ ) VULKAN_HPP_NOEXCEPT
{
- memcpy( color, color_.data(), 4 * sizeof( float ) );
+ color = color_;
return *this;
}
+
operator VkDebugUtilsLabelEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDebugUtilsLabelEXT*>( this );
@@ -26656,52 +27499,69 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkDebugUtilsLabelEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DebugUtilsLabelEXT const& ) const = default;
+#else
bool operator==( DebugUtilsLabelEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pLabelName == rhs.pLabelName )
- && ( memcmp( color, rhs.color, 4 * sizeof( float ) ) == 0 );
+ && ( color == rhs.color );
}
bool operator!=( DebugUtilsLabelEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsLabelEXT;
const void* pNext = {};
const char* pLabelName = {};
- float color[4] = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> color = {};
+
};
static_assert( sizeof( DebugUtilsLabelEXT ) == sizeof( VkDebugUtilsLabelEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DebugUtilsLabelEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eDebugUtilsLabelEXT>
+ {
+ using Type = DebugUtilsLabelEXT;
+ };
+
struct DebugUtilsObjectNameInfoEXT
{
- VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown,
- uint64_t objectHandle_ = {},
- const char* pObjectName_ = {} ) VULKAN_HPP_NOEXCEPT
- : objectType( objectType_ )
- , objectHandle( objectHandle_ )
- , pObjectName( pObjectName_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsObjectNameInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT(VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, uint64_t objectHandle_ = {}, const char* pObjectName_ = {}) VULKAN_HPP_NOEXCEPT
+ : objectType( objectType_ ), objectHandle( objectHandle_ ), pObjectName( pObjectName_ )
{}
- VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT & operator=( VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT ) - offsetof( DebugUtilsObjectNameInfoEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT( DebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DebugUtilsObjectNameInfoEXT( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- DebugUtilsObjectNameInfoEXT& operator=( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ DebugUtilsObjectNameInfoEXT & operator=( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+ DebugUtilsObjectNameInfoEXT & operator=( DebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DebugUtilsObjectNameInfoEXT ) );
return *this;
}
@@ -26729,6 +27589,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkDebugUtilsObjectNameInfoEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT*>( this );
@@ -26739,6 +27600,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkDebugUtilsObjectNameInfoEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DebugUtilsObjectNameInfoEXT const& ) const = default;
+#else
bool operator==( DebugUtilsObjectNameInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -26752,6 +27617,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsObjectNameInfoEXT;
@@ -26759,48 +27627,50 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown;
uint64_t objectHandle = {};
const char* pObjectName = {};
+
};
static_assert( sizeof( DebugUtilsObjectNameInfoEXT ) == sizeof( VkDebugUtilsObjectNameInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DebugUtilsObjectNameInfoEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eDebugUtilsObjectNameInfoEXT>
+ {
+ using Type = DebugUtilsObjectNameInfoEXT;
+ };
+
struct DebugUtilsMessengerCallbackDataEXT
{
- VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCallbackDataEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_ = {},
- const char* pMessageIdName_ = {},
- int32_t messageIdNumber_ = {},
- const char* pMessage_ = {},
- uint32_t queueLabelCount_ = {},
- const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pQueueLabels_ = {},
- uint32_t cmdBufLabelCount_ = {},
- const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pCmdBufLabels_ = {},
- uint32_t objectCount_ = {},
- const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pObjects_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , pMessageIdName( pMessageIdName_ )
- , messageIdNumber( messageIdNumber_ )
- , pMessage( pMessage_ )
- , queueLabelCount( queueLabelCount_ )
- , pQueueLabels( pQueueLabels_ )
- , cmdBufLabelCount( cmdBufLabelCount_ )
- , pCmdBufLabels( pCmdBufLabels_ )
- , objectCount( objectCount_ )
- , pObjects( pObjects_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsMessengerCallbackDataEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT(VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_ = {}, const char* pMessageIdName_ = {}, int32_t messageIdNumber_ = {}, const char* pMessage_ = {}, uint32_t queueLabelCount_ = {}, const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pQueueLabels_ = {}, uint32_t cmdBufLabelCount_ = {}, const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pCmdBufLabels_ = {}, uint32_t objectCount_ = {}, const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pObjects_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), pMessageIdName( pMessageIdName_ ), messageIdNumber( messageIdNumber_ ), pMessage( pMessage_ ), queueLabelCount( queueLabelCount_ ), pQueueLabels( pQueueLabels_ ), cmdBufLabelCount( cmdBufLabelCount_ ), pCmdBufLabels( pCmdBufLabels_ ), objectCount( objectCount_ ), pObjects( pObjects_ )
{}
- VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT & operator=( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT ) - offsetof( DebugUtilsMessengerCallbackDataEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT( DebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DebugUtilsMessengerCallbackDataEXT( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- DebugUtilsMessengerCallbackDataEXT& operator=( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ DebugUtilsMessengerCallbackDataEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_, const char* pMessageIdName_, int32_t messageIdNumber_, const char* pMessage_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const & queueLabels_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const & cmdBufLabels_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT> const & objects_ = {} )
+ : flags( flags_ ), pMessageIdName( pMessageIdName_ ), messageIdNumber( messageIdNumber_ ), pMessage( pMessage_ ), queueLabelCount( static_cast<uint32_t>( queueLabels_.size() ) ), pQueueLabels( queueLabels_.data() ), cmdBufLabelCount( static_cast<uint32_t>( cmdBufLabels_.size() ) ), pCmdBufLabels( cmdBufLabels_.data() ), objectCount( static_cast<uint32_t>( objects_.size() ) ), pObjects( objects_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ DebugUtilsMessengerCallbackDataEXT & operator=( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT const *>( &rhs );
+ return *this;
+ }
+
+ DebugUtilsMessengerCallbackDataEXT & operator=( DebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DebugUtilsMessengerCallbackDataEXT ) );
return *this;
}
@@ -26846,6 +27716,15 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ DebugUtilsMessengerCallbackDataEXT & setQueueLabels( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const & queueLabels_ ) VULKAN_HPP_NOEXCEPT
+ {
+ queueLabelCount = static_cast<uint32_t>( queueLabels_.size() );
+ pQueueLabels = queueLabels_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
DebugUtilsMessengerCallbackDataEXT & setCmdBufLabelCount( uint32_t cmdBufLabelCount_ ) VULKAN_HPP_NOEXCEPT
{
cmdBufLabelCount = cmdBufLabelCount_;
@@ -26858,6 +27737,15 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ DebugUtilsMessengerCallbackDataEXT & setCmdBufLabels( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const & cmdBufLabels_ ) VULKAN_HPP_NOEXCEPT
+ {
+ cmdBufLabelCount = static_cast<uint32_t>( cmdBufLabels_.size() );
+ pCmdBufLabels = cmdBufLabels_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
DebugUtilsMessengerCallbackDataEXT & setObjectCount( uint32_t objectCount_ ) VULKAN_HPP_NOEXCEPT
{
objectCount = objectCount_;
@@ -26870,6 +27758,16 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ DebugUtilsMessengerCallbackDataEXT & setObjects( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT> const & objects_ ) VULKAN_HPP_NOEXCEPT
+ {
+ objectCount = static_cast<uint32_t>( objects_.size() );
+ pObjects = objects_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
operator VkDebugUtilsMessengerCallbackDataEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT*>( this );
@@ -26880,6 +27778,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkDebugUtilsMessengerCallbackDataEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DebugUtilsMessengerCallbackDataEXT const& ) const = default;
+#else
bool operator==( DebugUtilsMessengerCallbackDataEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -26900,6 +27802,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCallbackDataEXT;
@@ -26914,38 +27819,44 @@ namespace VULKAN_HPP_NAMESPACE
const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pCmdBufLabels = {};
uint32_t objectCount = {};
const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pObjects = {};
+
};
static_assert( sizeof( DebugUtilsMessengerCallbackDataEXT ) == sizeof( VkDebugUtilsMessengerCallbackDataEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DebugUtilsMessengerCallbackDataEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eDebugUtilsMessengerCallbackDataEXT>
+ {
+ using Type = DebugUtilsMessengerCallbackDataEXT;
+ };
+
struct DebugUtilsMessengerCreateInfoEXT
{
- VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCreateInfoEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ = {},
- VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ = {},
- VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ = {},
- PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ = {},
- void* pUserData_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , messageSeverity( messageSeverity_ )
- , messageType( messageType_ )
- , pfnUserCallback( pfnUserCallback_ )
- , pUserData( pUserData_ )
+ static const bool allowDuplicate = true;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsMessengerCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCreateInfoEXT(VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ = {}, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ = {}, PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ = {}, void* pUserData_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), messageSeverity( messageSeverity_ ), messageType( messageType_ ), pfnUserCallback( pfnUserCallback_ ), pUserData( pUserData_ )
{}
- VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT ) - offsetof( DebugUtilsMessengerCreateInfoEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCreateInfoEXT( DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DebugUtilsMessengerCreateInfoEXT( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- DebugUtilsMessengerCreateInfoEXT& operator=( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ DebugUtilsMessengerCreateInfoEXT & operator=( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+ DebugUtilsMessengerCreateInfoEXT & operator=( DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DebugUtilsMessengerCreateInfoEXT ) );
return *this;
}
@@ -26985,6 +27896,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkDebugUtilsMessengerCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT*>( this );
@@ -26995,6 +27907,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkDebugUtilsMessengerCreateInfoEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DebugUtilsMessengerCreateInfoEXT const& ) const = default;
+#else
bool operator==( DebugUtilsMessengerCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -27010,6 +27926,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCreateInfoEXT;
@@ -27019,38 +27938,51 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType = {};
PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback = {};
void* pUserData = {};
+
};
static_assert( sizeof( DebugUtilsMessengerCreateInfoEXT ) == sizeof( VkDebugUtilsMessengerCreateInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DebugUtilsMessengerCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eDebugUtilsMessengerCreateInfoEXT>
+ {
+ using Type = DebugUtilsMessengerCreateInfoEXT;
+ };
+
struct DebugUtilsObjectTagInfoEXT
{
- VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown,
- uint64_t objectHandle_ = {},
- uint64_t tagName_ = {},
- size_t tagSize_ = {},
- const void* pTag_ = {} ) VULKAN_HPP_NOEXCEPT
- : objectType( objectType_ )
- , objectHandle( objectHandle_ )
- , tagName( tagName_ )
- , tagSize( tagSize_ )
- , pTag( pTag_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsObjectTagInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT(VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, uint64_t objectHandle_ = {}, uint64_t tagName_ = {}, size_t tagSize_ = {}, const void* pTag_ = {}) VULKAN_HPP_NOEXCEPT
+ : objectType( objectType_ ), objectHandle( objectHandle_ ), tagName( tagName_ ), tagSize( tagSize_ ), pTag( pTag_ )
{}
- VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT & operator=( VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT ) - offsetof( DebugUtilsObjectTagInfoEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT( DebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DebugUtilsObjectTagInfoEXT( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- DebugUtilsObjectTagInfoEXT& operator=( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ template <typename T>
+ DebugUtilsObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, uint64_t objectHandle_, uint64_t tagName_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & tag_ )
+ : objectType( objectType_ ), objectHandle( objectHandle_ ), tagName( tagName_ ), tagSize( tag_.size() * sizeof(T) ), pTag( tag_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ DebugUtilsObjectTagInfoEXT & operator=( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+ DebugUtilsObjectTagInfoEXT & operator=( DebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DebugUtilsObjectTagInfoEXT ) );
return *this;
}
@@ -27090,6 +28022,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ template <typename T>
+ DebugUtilsObjectTagInfoEXT & setTag( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & tag_ ) VULKAN_HPP_NOEXCEPT
+ {
+ tagSize = tag_.size() * sizeof(T);
+ pTag = tag_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
operator VkDebugUtilsObjectTagInfoEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT*>( this );
@@ -27100,6 +28043,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkDebugUtilsObjectTagInfoEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DebugUtilsObjectTagInfoEXT const& ) const = default;
+#else
bool operator==( DebugUtilsObjectTagInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -27115,6 +28062,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsObjectTagInfoEXT;
@@ -27124,30 +28074,44 @@ namespace VULKAN_HPP_NAMESPACE
uint64_t tagName = {};
size_t tagSize = {};
const void* pTag = {};
+
};
static_assert( sizeof( DebugUtilsObjectTagInfoEXT ) == sizeof( VkDebugUtilsObjectTagInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DebugUtilsObjectTagInfoEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eDebugUtilsObjectTagInfoEXT>
+ {
+ using Type = DebugUtilsObjectTagInfoEXT;
+ };
+
struct DedicatedAllocationBufferCreateInfoNV
{
- VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {} ) VULKAN_HPP_NOEXCEPT
- : dedicatedAllocation( dedicatedAllocation_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDedicatedAllocationBufferCreateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV(VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {}) VULKAN_HPP_NOEXCEPT
+ : dedicatedAllocation( dedicatedAllocation_ )
{}
- VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV ) - offsetof( DedicatedAllocationBufferCreateInfoNV, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV( DedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DedicatedAllocationBufferCreateInfoNV( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- DedicatedAllocationBufferCreateInfoNV& operator=( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ DedicatedAllocationBufferCreateInfoNV & operator=( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV const *>( &rhs );
+ return *this;
+ }
+
+ DedicatedAllocationBufferCreateInfoNV & operator=( DedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DedicatedAllocationBufferCreateInfoNV ) );
return *this;
}
@@ -27163,6 +28127,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkDedicatedAllocationBufferCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDedicatedAllocationBufferCreateInfoNV*>( this );
@@ -27173,6 +28138,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkDedicatedAllocationBufferCreateInfoNV*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DedicatedAllocationBufferCreateInfoNV const& ) const = default;
+#else
bool operator==( DedicatedAllocationBufferCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -27184,35 +28153,52 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationBufferCreateInfoNV;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation = {};
+
};
static_assert( sizeof( DedicatedAllocationBufferCreateInfoNV ) == sizeof( VkDedicatedAllocationBufferCreateInfoNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DedicatedAllocationBufferCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eDedicatedAllocationBufferCreateInfoNV>
+ {
+ using Type = DedicatedAllocationBufferCreateInfoNV;
+ };
+
struct DedicatedAllocationImageCreateInfoNV
{
- VULKAN_HPP_CONSTEXPR DedicatedAllocationImageCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {} ) VULKAN_HPP_NOEXCEPT
- : dedicatedAllocation( dedicatedAllocation_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDedicatedAllocationImageCreateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DedicatedAllocationImageCreateInfoNV(VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {}) VULKAN_HPP_NOEXCEPT
+ : dedicatedAllocation( dedicatedAllocation_ )
{}
- VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV ) - offsetof( DedicatedAllocationImageCreateInfoNV, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR DedicatedAllocationImageCreateInfoNV( DedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DedicatedAllocationImageCreateInfoNV( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ DedicatedAllocationImageCreateInfoNV & operator=( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV const *>( &rhs );
+ return *this;
+ }
- DedicatedAllocationImageCreateInfoNV& operator=( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ DedicatedAllocationImageCreateInfoNV & operator=( DedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DedicatedAllocationImageCreateInfoNV ) );
return *this;
}
@@ -27228,6 +28214,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkDedicatedAllocationImageCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDedicatedAllocationImageCreateInfoNV*>( this );
@@ -27238,6 +28225,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkDedicatedAllocationImageCreateInfoNV*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DedicatedAllocationImageCreateInfoNV const& ) const = default;
+#else
bool operator==( DedicatedAllocationImageCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -27249,37 +28240,52 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationImageCreateInfoNV;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation = {};
+
};
static_assert( sizeof( DedicatedAllocationImageCreateInfoNV ) == sizeof( VkDedicatedAllocationImageCreateInfoNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DedicatedAllocationImageCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eDedicatedAllocationImageCreateInfoNV>
+ {
+ using Type = DedicatedAllocationImageCreateInfoNV;
+ };
+
struct DedicatedAllocationMemoryAllocateInfoNV
{
- VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV( VULKAN_HPP_NAMESPACE::Image image_ = {},
- VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) VULKAN_HPP_NOEXCEPT
- : image( image_ )
- , buffer( buffer_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDedicatedAllocationMemoryAllocateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV(VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}) VULKAN_HPP_NOEXCEPT
+ : image( image_ ), buffer( buffer_ )
{}
- VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV & operator=( VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV ) - offsetof( DedicatedAllocationMemoryAllocateInfoNV, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DedicatedAllocationMemoryAllocateInfoNV( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ DedicatedAllocationMemoryAllocateInfoNV & operator=( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV const *>( &rhs );
+ return *this;
+ }
- DedicatedAllocationMemoryAllocateInfoNV& operator=( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ DedicatedAllocationMemoryAllocateInfoNV & operator=( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DedicatedAllocationMemoryAllocateInfoNV ) );
return *this;
}
@@ -27301,6 +28307,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkDedicatedAllocationMemoryAllocateInfoNV const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDedicatedAllocationMemoryAllocateInfoNV*>( this );
@@ -27311,6 +28318,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkDedicatedAllocationMemoryAllocateInfoNV*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DedicatedAllocationMemoryAllocateInfoNV const& ) const = default;
+#else
bool operator==( DedicatedAllocationMemoryAllocateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -27323,34 +28334,237 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationMemoryAllocateInfoNV;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::Image image = {};
VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+
};
static_assert( sizeof( DedicatedAllocationMemoryAllocateInfoNV ) == sizeof( VkDedicatedAllocationMemoryAllocateInfoNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DedicatedAllocationMemoryAllocateInfoNV>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eDedicatedAllocationMemoryAllocateInfoNV>
+ {
+ using Type = DedicatedAllocationMemoryAllocateInfoNV;
+ };
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ class DeferredOperationKHR
+ {
+ public:
+ using CType = VkDeferredOperationKHR;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDeferredOperationKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+
+ public:
+ VULKAN_HPP_CONSTEXPR DeferredOperationKHR() VULKAN_HPP_NOEXCEPT
+ : m_deferredOperationKHR(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR DeferredOperationKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_deferredOperationKHR(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT DeferredOperationKHR( VkDeferredOperationKHR deferredOperationKHR ) VULKAN_HPP_NOEXCEPT
+ : m_deferredOperationKHR( deferredOperationKHR )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ DeferredOperationKHR & operator=(VkDeferredOperationKHR deferredOperationKHR) VULKAN_HPP_NOEXCEPT
+ {
+ m_deferredOperationKHR = deferredOperationKHR;
+ return *this;
+ }
+#endif
+
+ DeferredOperationKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ {
+ m_deferredOperationKHR = VK_NULL_HANDLE;
+ return *this;
+ }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DeferredOperationKHR const& ) const = default;
+#else
+ bool operator==( DeferredOperationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_deferredOperationKHR == rhs.m_deferredOperationKHR;
+ }
+
+ bool operator!=(DeferredOperationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_deferredOperationKHR != rhs.m_deferredOperationKHR;
+ }
+
+ bool operator<(DeferredOperationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_deferredOperationKHR < rhs.m_deferredOperationKHR;
+ }
+#endif
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDeferredOperationKHR() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_deferredOperationKHR;
+ }
+
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_deferredOperationKHR != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_deferredOperationKHR == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkDeferredOperationKHR m_deferredOperationKHR;
+ };
+ static_assert( sizeof( VULKAN_HPP_NAMESPACE::DeferredOperationKHR ) == sizeof( VkDeferredOperationKHR ), "handle and wrapper have different size!" );
+
+ template <>
+ struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eDeferredOperationKHR>
+ {
+ using type = VULKAN_HPP_NAMESPACE::DeferredOperationKHR;
+ };
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDeferredOperationKHR>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::DeferredOperationKHR;
+ };
+
+
+
+ template <>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DeferredOperationKHR>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+ };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ struct DeferredOperationInfoKHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeferredOperationInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DeferredOperationInfoKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR operationHandle_ = {}) VULKAN_HPP_NOEXCEPT
+ : operationHandle( operationHandle_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR DeferredOperationInfoKHR( DeferredOperationInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DeferredOperationInfoKHR( VkDeferredOperationInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ DeferredOperationInfoKHR & operator=( VkDeferredOperationInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeferredOperationInfoKHR const *>( &rhs );
+ return *this;
+ }
+
+ DeferredOperationInfoKHR & operator=( DeferredOperationInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DeferredOperationInfoKHR ) );
+ return *this;
+ }
+
+ DeferredOperationInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ DeferredOperationInfoKHR & setOperationHandle( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operationHandle_ ) VULKAN_HPP_NOEXCEPT
+ {
+ operationHandle = operationHandle_;
+ return *this;
+ }
+
+
+ operator VkDeferredOperationInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkDeferredOperationInfoKHR*>( this );
+ }
+
+ operator VkDeferredOperationInfoKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkDeferredOperationInfoKHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DeferredOperationInfoKHR const& ) const = default;
+#else
+ bool operator==( DeferredOperationInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( operationHandle == rhs.operationHandle );
+ }
+
+ bool operator!=( DeferredOperationInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeferredOperationInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DeferredOperationKHR operationHandle = {};
+
+ };
+ static_assert( sizeof( DeferredOperationInfoKHR ) == sizeof( VkDeferredOperationInfoKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<DeferredOperationInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eDeferredOperationInfoKHR>
+ {
+ using Type = DeferredOperationInfoKHR;
+ };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
struct DescriptorBufferInfo
{
- VULKAN_HPP_CONSTEXPR DescriptorBufferInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize range_ = {} ) VULKAN_HPP_NOEXCEPT
- : buffer( buffer_ )
- , offset( offset_ )
- , range( range_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DescriptorBufferInfo(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize range_ = {}) VULKAN_HPP_NOEXCEPT
+ : buffer( buffer_ ), offset( offset_ ), range( range_ )
{}
+ VULKAN_HPP_CONSTEXPR DescriptorBufferInfo( DescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
DescriptorBufferInfo( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- DescriptorBufferInfo& operator=( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ DescriptorBufferInfo & operator=( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorBufferInfo const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorBufferInfo const *>( &rhs );
+ return *this;
+ }
+
+ DescriptorBufferInfo & operator=( DescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DescriptorBufferInfo ) );
return *this;
}
@@ -27372,6 +28586,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkDescriptorBufferInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDescriptorBufferInfo*>( this );
@@ -27382,6 +28597,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkDescriptorBufferInfo*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DescriptorBufferInfo const& ) const = default;
+#else
bool operator==( DescriptorBufferInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( buffer == rhs.buffer )
@@ -27393,33 +28612,245 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
VULKAN_HPP_NAMESPACE::Buffer buffer = {};
VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
VULKAN_HPP_NAMESPACE::DeviceSize range = {};
+
};
static_assert( sizeof( DescriptorBufferInfo ) == sizeof( VkDescriptorBufferInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DescriptorBufferInfo>::value, "struct wrapper is not a standard layout!" );
+ class Sampler
+ {
+ public:
+ using CType = VkSampler;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSampler;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSampler;
+
+ public:
+ VULKAN_HPP_CONSTEXPR Sampler() VULKAN_HPP_NOEXCEPT
+ : m_sampler(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR Sampler( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_sampler(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT Sampler( VkSampler sampler ) VULKAN_HPP_NOEXCEPT
+ : m_sampler( sampler )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ Sampler & operator=(VkSampler sampler) VULKAN_HPP_NOEXCEPT
+ {
+ m_sampler = sampler;
+ return *this;
+ }
+#endif
+
+ Sampler & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ {
+ m_sampler = VK_NULL_HANDLE;
+ return *this;
+ }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( Sampler const& ) const = default;
+#else
+ bool operator==( Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_sampler == rhs.m_sampler;
+ }
+
+ bool operator!=(Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_sampler != rhs.m_sampler;
+ }
+
+ bool operator<(Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_sampler < rhs.m_sampler;
+ }
+#endif
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSampler() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_sampler;
+ }
+
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_sampler != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_sampler == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkSampler m_sampler;
+ };
+ static_assert( sizeof( VULKAN_HPP_NAMESPACE::Sampler ) == sizeof( VkSampler ), "handle and wrapper have different size!" );
+
+ template <>
+ struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eSampler>
+ {
+ using type = VULKAN_HPP_NAMESPACE::Sampler;
+ };
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eSampler>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::Sampler;
+ };
+
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSampler>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::Sampler;
+ };
+
+
+ template <>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Sampler>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+ };
+
+ class ImageView
+ {
+ public:
+ using CType = VkImageView;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eImageView;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImageView;
+
+ public:
+ VULKAN_HPP_CONSTEXPR ImageView() VULKAN_HPP_NOEXCEPT
+ : m_imageView(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR ImageView( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_imageView(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT ImageView( VkImageView imageView ) VULKAN_HPP_NOEXCEPT
+ : m_imageView( imageView )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ ImageView & operator=(VkImageView imageView) VULKAN_HPP_NOEXCEPT
+ {
+ m_imageView = imageView;
+ return *this;
+ }
+#endif
+
+ ImageView & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ {
+ m_imageView = VK_NULL_HANDLE;
+ return *this;
+ }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ImageView const& ) const = default;
+#else
+ bool operator==( ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_imageView == rhs.m_imageView;
+ }
+
+ bool operator!=(ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_imageView != rhs.m_imageView;
+ }
+
+ bool operator<(ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_imageView < rhs.m_imageView;
+ }
+#endif
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImageView() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_imageView;
+ }
+
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_imageView != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_imageView == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkImageView m_imageView;
+ };
+ static_assert( sizeof( VULKAN_HPP_NAMESPACE::ImageView ) == sizeof( VkImageView ), "handle and wrapper have different size!" );
+
+ template <>
+ struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eImageView>
+ {
+ using type = VULKAN_HPP_NAMESPACE::ImageView;
+ };
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eImageView>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::ImageView;
+ };
+
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImageView>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::ImageView;
+ };
+
+
+ template <>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::ImageView>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+ };
+
struct DescriptorImageInfo
{
- VULKAN_HPP_CONSTEXPR DescriptorImageInfo( VULKAN_HPP_NAMESPACE::Sampler sampler_ = {},
- VULKAN_HPP_NAMESPACE::ImageView imageView_ = {},
- VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
- : sampler( sampler_ )
- , imageView( imageView_ )
- , imageLayout( imageLayout_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DescriptorImageInfo(VULKAN_HPP_NAMESPACE::Sampler sampler_ = {}, VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined) VULKAN_HPP_NOEXCEPT
+ : sampler( sampler_ ), imageView( imageView_ ), imageLayout( imageLayout_ )
{}
+ VULKAN_HPP_CONSTEXPR DescriptorImageInfo( DescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
DescriptorImageInfo( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- DescriptorImageInfo& operator=( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ DescriptorImageInfo & operator=( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorImageInfo const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorImageInfo const *>( &rhs );
+ return *this;
+ }
+
+ DescriptorImageInfo & operator=( DescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DescriptorImageInfo ) );
return *this;
}
@@ -27441,6 +28872,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkDescriptorImageInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDescriptorImageInfo*>( this );
@@ -27451,6 +28883,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkDescriptorImageInfo*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DescriptorImageInfo const& ) const = default;
+#else
bool operator==( DescriptorImageInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sampler == rhs.sampler )
@@ -27462,31 +28898,45 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
VULKAN_HPP_NAMESPACE::Sampler sampler = {};
VULKAN_HPP_NAMESPACE::ImageView imageView = {};
VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+
};
static_assert( sizeof( DescriptorImageInfo ) == sizeof( VkDescriptorImageInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DescriptorImageInfo>::value, "struct wrapper is not a standard layout!" );
struct DescriptorPoolSize
{
- VULKAN_HPP_CONSTEXPR DescriptorPoolSize( VULKAN_HPP_NAMESPACE::DescriptorType type_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler,
- uint32_t descriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT
- : type( type_ )
- , descriptorCount( descriptorCount_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DescriptorPoolSize(VULKAN_HPP_NAMESPACE::DescriptorType type_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, uint32_t descriptorCount_ = {}) VULKAN_HPP_NOEXCEPT
+ : type( type_ ), descriptorCount( descriptorCount_ )
{}
+ VULKAN_HPP_CONSTEXPR DescriptorPoolSize( DescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
DescriptorPoolSize( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ DescriptorPoolSize & operator=( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorPoolSize const *>( &rhs );
+ return *this;
+ }
- DescriptorPoolSize& operator=( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT
+ DescriptorPoolSize & operator=( DescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorPoolSize const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DescriptorPoolSize ) );
return *this;
}
@@ -27502,6 +28952,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkDescriptorPoolSize const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDescriptorPoolSize*>( this );
@@ -27512,6 +28963,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkDescriptorPoolSize*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DescriptorPoolSize const& ) const = default;
+#else
bool operator==( DescriptorPoolSize const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( type == rhs.type )
@@ -27522,40 +28977,51 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
VULKAN_HPP_NAMESPACE::DescriptorType type = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
uint32_t descriptorCount = {};
+
};
static_assert( sizeof( DescriptorPoolSize ) == sizeof( VkDescriptorPoolSize ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DescriptorPoolSize>::value, "struct wrapper is not a standard layout!" );
struct DescriptorPoolCreateInfo
{
- VULKAN_HPP_CONSTEXPR DescriptorPoolCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_ = {},
- uint32_t maxSets_ = {},
- uint32_t poolSizeCount_ = {},
- const VULKAN_HPP_NAMESPACE::DescriptorPoolSize* pPoolSizes_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , maxSets( maxSets_ )
- , poolSizeCount( poolSizeCount_ )
- , pPoolSizes( pPoolSizes_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorPoolCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DescriptorPoolCreateInfo(VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_ = {}, uint32_t maxSets_ = {}, uint32_t poolSizeCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorPoolSize* pPoolSizes_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), maxSets( maxSets_ ), poolSizeCount( poolSizeCount_ ), pPoolSizes( pPoolSizes_ )
{}
- VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & operator=( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo ) - offsetof( DescriptorPoolCreateInfo, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR DescriptorPoolCreateInfo( DescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DescriptorPoolCreateInfo( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- DescriptorPoolCreateInfo& operator=( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ DescriptorPoolCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_, uint32_t maxSets_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorPoolSize> const & poolSizes_ )
+ : flags( flags_ ), maxSets( maxSets_ ), poolSizeCount( static_cast<uint32_t>( poolSizes_.size() ) ), pPoolSizes( poolSizes_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ DescriptorPoolCreateInfo & operator=( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const *>( &rhs );
+ return *this;
+ }
+
+ DescriptorPoolCreateInfo & operator=( DescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DescriptorPoolCreateInfo ) );
return *this;
}
@@ -27589,6 +29055,16 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ DescriptorPoolCreateInfo & setPoolSizes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorPoolSize> const & poolSizes_ ) VULKAN_HPP_NOEXCEPT
+ {
+ poolSizeCount = static_cast<uint32_t>( poolSizes_.size() );
+ pPoolSizes = poolSizes_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
operator VkDescriptorPoolCreateInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDescriptorPoolCreateInfo*>( this );
@@ -27599,6 +29075,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkDescriptorPoolCreateInfo*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DescriptorPoolCreateInfo const& ) const = default;
+#else
bool operator==( DescriptorPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -27613,6 +29093,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorPoolCreateInfo;
@@ -27621,30 +29104,44 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t maxSets = {};
uint32_t poolSizeCount = {};
const VULKAN_HPP_NAMESPACE::DescriptorPoolSize* pPoolSizes = {};
+
};
static_assert( sizeof( DescriptorPoolCreateInfo ) == sizeof( VkDescriptorPoolCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DescriptorPoolCreateInfo>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eDescriptorPoolCreateInfo>
+ {
+ using Type = DescriptorPoolCreateInfo;
+ };
+
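Note: the hunk above drops DescriptorPoolCreateInfo's memcpy-based self-assignment in favour of defaulted copy operations and adds an ArrayProxyNoTemporaries constructor plus a setPoolSizes() setter that derive poolSizeCount/pPoolSizes from a named array. A minimal usage sketch of that overload (illustrative only, not part of the generated header; assumes enhanced mode is enabled and a valid vk::Device named device):

    #include <array>
    #include <vulkan/vulkan.hpp>

    vk::DescriptorPool makeDescriptorPool( vk::Device device )
    {
      // Named (non-temporary) array so it satisfies ArrayProxyNoTemporaries.
      std::array<vk::DescriptorPoolSize, 2> poolSizes = {
        vk::DescriptorPoolSize( vk::DescriptorType::eUniformBuffer, 16 ),
        vk::DescriptorPoolSize( vk::DescriptorType::eCombinedImageSampler, 16 )
      };
      // poolSizeCount and pPoolSizes are filled from the proxy.
      vk::DescriptorPoolCreateInfo createInfo( {}, /*maxSets=*/32, poolSizes );
      return device.createDescriptorPool( createInfo );
    }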
struct DescriptorPoolInlineUniformBlockCreateInfoEXT
{
- VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfoEXT( uint32_t maxInlineUniformBlockBindings_ = {} ) VULKAN_HPP_NOEXCEPT
- : maxInlineUniformBlockBindings( maxInlineUniformBlockBindings_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorPoolInlineUniformBlockCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfoEXT(uint32_t maxInlineUniformBlockBindings_ = {}) VULKAN_HPP_NOEXCEPT
+ : maxInlineUniformBlockBindings( maxInlineUniformBlockBindings_ )
{}
- VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfoEXT ) - offsetof( DescriptorPoolInlineUniformBlockCreateInfoEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfoEXT( DescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DescriptorPoolInlineUniformBlockCreateInfoEXT( VkDescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- DescriptorPoolInlineUniformBlockCreateInfoEXT& operator=( VkDescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ DescriptorPoolInlineUniformBlockCreateInfoEXT & operator=( VkDescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfoEXT const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+ DescriptorPoolInlineUniformBlockCreateInfoEXT & operator=( DescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DescriptorPoolInlineUniformBlockCreateInfoEXT ) );
return *this;
}
@@ -27660,6 +29157,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkDescriptorPoolInlineUniformBlockCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDescriptorPoolInlineUniformBlockCreateInfoEXT*>( this );
@@ -27670,6 +29168,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkDescriptorPoolInlineUniformBlockCreateInfoEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DescriptorPoolInlineUniformBlockCreateInfoEXT const& ) const = default;
+#else
bool operator==( DescriptorPoolInlineUniformBlockCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -27681,39 +29183,258 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorPoolInlineUniformBlockCreateInfoEXT;
const void* pNext = {};
uint32_t maxInlineUniformBlockBindings = {};
+
};
static_assert( sizeof( DescriptorPoolInlineUniformBlockCreateInfoEXT ) == sizeof( VkDescriptorPoolInlineUniformBlockCreateInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DescriptorPoolInlineUniformBlockCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
- struct DescriptorSetAllocateInfo
+ template <>
+ struct CppType<StructureType, StructureType::eDescriptorPoolInlineUniformBlockCreateInfoEXT>
+ {
+ using Type = DescriptorPoolInlineUniformBlockCreateInfoEXT;
+ };
+
+ class DescriptorPool
+ {
+ public:
+ using CType = VkDescriptorPool;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorPool;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorPool;
+
+ public:
+ VULKAN_HPP_CONSTEXPR DescriptorPool() VULKAN_HPP_NOEXCEPT
+ : m_descriptorPool(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR DescriptorPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_descriptorPool(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorPool( VkDescriptorPool descriptorPool ) VULKAN_HPP_NOEXCEPT
+ : m_descriptorPool( descriptorPool )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ DescriptorPool & operator=(VkDescriptorPool descriptorPool) VULKAN_HPP_NOEXCEPT
+ {
+ m_descriptorPool = descriptorPool;
+ return *this;
+ }
+#endif
+
+ DescriptorPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ {
+ m_descriptorPool = VK_NULL_HANDLE;
+ return *this;
+ }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DescriptorPool const& ) const = default;
+#else
+ bool operator==( DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_descriptorPool == rhs.m_descriptorPool;
+ }
+
+ bool operator!=(DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_descriptorPool != rhs.m_descriptorPool;
+ }
+
+ bool operator<(DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_descriptorPool < rhs.m_descriptorPool;
+ }
+#endif
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorPool() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_descriptorPool;
+ }
+
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_descriptorPool != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_descriptorPool == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkDescriptorPool m_descriptorPool;
+ };
+ static_assert( sizeof( VULKAN_HPP_NAMESPACE::DescriptorPool ) == sizeof( VkDescriptorPool ), "handle and wrapper have different size!" );
+
+ template <>
+ struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eDescriptorPool>
+ {
+ using type = VULKAN_HPP_NAMESPACE::DescriptorPool;
+ };
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorPool>
{
- VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ = {},
- uint32_t descriptorSetCount_ = {},
- const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts_ = {} ) VULKAN_HPP_NOEXCEPT
- : descriptorPool( descriptorPool_ )
- , descriptorSetCount( descriptorSetCount_ )
- , pSetLayouts( pSetLayouts_ )
+ using Type = VULKAN_HPP_NAMESPACE::DescriptorPool;
+ };
+
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorPool>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::DescriptorPool;
+ };
+
+
+ template <>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DescriptorPool>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+ };
+
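Note: the DescriptorPool handle wrapper now appears at this point in the header. It is a thin value type over VkDescriptorPool; a short sketch of what the operators above provide (illustrative only):

    #include <vulkan/vulkan.hpp>

    void handleSemantics( vk::DescriptorPool pool )
    {
      vk::DescriptorPool empty;                                       // holds VK_NULL_HANDLE
      bool valid = static_cast<bool>( pool );                         // explicit operator bool
      bool same  = ( pool == empty );                                 // compares the raw handles
      VkDescriptorPool raw = static_cast<VkDescriptorPool>( pool );   // back to the C handle
      (void) valid; (void) same; (void) raw;
    }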
+ class DescriptorSetLayout
+ {
+ public:
+ using CType = VkDescriptorSetLayout;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSetLayout;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSetLayout;
+
+ public:
+ VULKAN_HPP_CONSTEXPR DescriptorSetLayout() VULKAN_HPP_NOEXCEPT
+ : m_descriptorSetLayout(VK_NULL_HANDLE)
{}
- VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & operator=( VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR DescriptorSetLayout( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_descriptorSetLayout(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSetLayout( VkDescriptorSetLayout descriptorSetLayout ) VULKAN_HPP_NOEXCEPT
+ : m_descriptorSetLayout( descriptorSetLayout )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ DescriptorSetLayout & operator=(VkDescriptorSetLayout descriptorSetLayout) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo ) - offsetof( DescriptorSetAllocateInfo, pNext ) );
+ m_descriptorSetLayout = descriptorSetLayout;
+ return *this;
+ }
+#endif
+
+ DescriptorSetLayout & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ {
+ m_descriptorSetLayout = VK_NULL_HANDLE;
return *this;
}
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DescriptorSetLayout const& ) const = default;
+#else
+ bool operator==( DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_descriptorSetLayout == rhs.m_descriptorSetLayout;
+ }
+
+ bool operator!=(DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_descriptorSetLayout != rhs.m_descriptorSetLayout;
+ }
+
+ bool operator<(DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_descriptorSetLayout < rhs.m_descriptorSetLayout;
+ }
+#endif
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSetLayout() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_descriptorSetLayout;
+ }
+
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_descriptorSetLayout != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_descriptorSetLayout == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkDescriptorSetLayout m_descriptorSetLayout;
+ };
+ static_assert( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayout ) == sizeof( VkDescriptorSetLayout ), "handle and wrapper have different size!" );
+
+ template <>
+ struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eDescriptorSetLayout>
+ {
+ using type = VULKAN_HPP_NAMESPACE::DescriptorSetLayout;
+ };
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSetLayout>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::DescriptorSetLayout;
+ };
+
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSetLayout>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::DescriptorSetLayout;
+ };
+
+
+ template <>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+ };
+
+ struct DescriptorSetAllocateInfo
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetAllocateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ = {}, uint32_t descriptorSetCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts_ = {}) VULKAN_HPP_NOEXCEPT
+ : descriptorPool( descriptorPool_ ), descriptorSetCount( descriptorSetCount_ ), pSetLayouts( pSetLayouts_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo( DescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
DescriptorSetAllocateInfo( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- DescriptorSetAllocateInfo& operator=( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ DescriptorSetAllocateInfo( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts_ )
+ : descriptorPool( descriptorPool_ ), descriptorSetCount( static_cast<uint32_t>( setLayouts_.size() ) ), pSetLayouts( setLayouts_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ DescriptorSetAllocateInfo & operator=( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const *>( &rhs );
+ return *this;
+ }
+
+ DescriptorSetAllocateInfo & operator=( DescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DescriptorSetAllocateInfo ) );
return *this;
}
@@ -27741,6 +29462,16 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ DescriptorSetAllocateInfo & setSetLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts_ ) VULKAN_HPP_NOEXCEPT
+ {
+ descriptorSetCount = static_cast<uint32_t>( setLayouts_.size() );
+ pSetLayouts = setLayouts_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
operator VkDescriptorSetAllocateInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDescriptorSetAllocateInfo*>( this );
@@ -27751,6 +29482,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkDescriptorSetAllocateInfo*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DescriptorSetAllocateInfo const& ) const = default;
+#else
bool operator==( DescriptorSetAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -27764,6 +29499,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetAllocateInfo;
@@ -27771,32 +29509,49 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool = {};
uint32_t descriptorSetCount = {};
const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts = {};
+
};
static_assert( sizeof( DescriptorSetAllocateInfo ) == sizeof( VkDescriptorSetAllocateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DescriptorSetAllocateInfo>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eDescriptorSetAllocateInfo>
+ {
+ using Type = DescriptorSetAllocateInfo;
+ };
+
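Note: DescriptorSetAllocateInfo gets the same treatment, with a defaulted copy constructor and an ArrayProxyNoTemporaries overload over the set layouts. A minimal sketch (illustrative only; assumes a valid device, pool and layout already exist and exceptions-based error handling):

    #include <array>
    #include <vector>
    #include <vulkan/vulkan.hpp>

    std::vector<vk::DescriptorSet> allocateSets( vk::Device device,
                                                 vk::DescriptorPool pool,
                                                 vk::DescriptorSetLayout layout )
    {
      // Three sets sharing one layout; descriptorSetCount comes from the proxy size.
      std::array<vk::DescriptorSetLayout, 3> layouts = { layout, layout, layout };
      vk::DescriptorSetAllocateInfo allocInfo( pool, layouts );
      return device.allocateDescriptorSets( allocInfo );
    }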
struct DescriptorSetLayoutBinding
{
- VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBinding( uint32_t binding_ = {},
- VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler,
- uint32_t descriptorCount_ = {},
- VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {},
- const VULKAN_HPP_NAMESPACE::Sampler* pImmutableSamplers_ = {} ) VULKAN_HPP_NOEXCEPT
- : binding( binding_ )
- , descriptorType( descriptorType_ )
- , descriptorCount( descriptorCount_ )
- , stageFlags( stageFlags_ )
- , pImmutableSamplers( pImmutableSamplers_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBinding(uint32_t binding_ = {}, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, uint32_t descriptorCount_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, const VULKAN_HPP_NAMESPACE::Sampler* pImmutableSamplers_ = {}) VULKAN_HPP_NOEXCEPT
+ : binding( binding_ ), descriptorType( descriptorType_ ), descriptorCount( descriptorCount_ ), stageFlags( stageFlags_ ), pImmutableSamplers( pImmutableSamplers_ )
{}
+ VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBinding( DescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
DescriptorSetLayoutBinding( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- DescriptorSetLayoutBinding& operator=( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ DescriptorSetLayoutBinding( uint32_t binding_, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Sampler> const & immutableSamplers_ )
+ : binding( binding_ ), descriptorType( descriptorType_ ), descriptorCount( static_cast<uint32_t>( immutableSamplers_.size() ) ), stageFlags( stageFlags_ ), pImmutableSamplers( immutableSamplers_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ DescriptorSetLayoutBinding & operator=( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding const *>( &rhs );
+ return *this;
+ }
+
+ DescriptorSetLayoutBinding & operator=( DescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DescriptorSetLayoutBinding ) );
return *this;
}
@@ -27830,6 +29585,16 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ DescriptorSetLayoutBinding & setImmutableSamplers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Sampler> const & immutableSamplers_ ) VULKAN_HPP_NOEXCEPT
+ {
+ descriptorCount = static_cast<uint32_t>( immutableSamplers_.size() );
+ pImmutableSamplers = immutableSamplers_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
operator VkDescriptorSetLayoutBinding const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDescriptorSetLayoutBinding*>( this );
@@ -27840,6 +29605,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkDescriptorSetLayoutBinding*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DescriptorSetLayoutBinding const& ) const = default;
+#else
bool operator==( DescriptorSetLayoutBinding const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( binding == rhs.binding )
@@ -27853,6 +29622,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
uint32_t binding = {};
@@ -27860,32 +29632,44 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t descriptorCount = {};
VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {};
const VULKAN_HPP_NAMESPACE::Sampler* pImmutableSamplers = {};
+
};
static_assert( sizeof( DescriptorSetLayoutBinding ) == sizeof( VkDescriptorSetLayoutBinding ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DescriptorSetLayoutBinding>::value, "struct wrapper is not a standard layout!" );
struct DescriptorSetLayoutBindingFlagsCreateInfo
{
- VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo( uint32_t bindingCount_ = {},
- const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags* pBindingFlags_ = {} ) VULKAN_HPP_NOEXCEPT
- : bindingCount( bindingCount_ )
- , pBindingFlags( pBindingFlags_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo(uint32_t bindingCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags* pBindingFlags_ = {}) VULKAN_HPP_NOEXCEPT
+ : bindingCount( bindingCount_ ), pBindingFlags( pBindingFlags_ )
{}
- VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo & operator=( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo ) - offsetof( DescriptorSetLayoutBindingFlagsCreateInfo, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DescriptorSetLayoutBindingFlagsCreateInfo( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- DescriptorSetLayoutBindingFlagsCreateInfo& operator=( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ DescriptorSetLayoutBindingFlagsCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags> const & bindingFlags_ )
+ : bindingCount( static_cast<uint32_t>( bindingFlags_.size() ) ), pBindingFlags( bindingFlags_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ DescriptorSetLayoutBindingFlagsCreateInfo & operator=( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo const *>( &rhs );
+ return *this;
+ }
+
+ DescriptorSetLayoutBindingFlagsCreateInfo & operator=( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DescriptorSetLayoutBindingFlagsCreateInfo ) );
return *this;
}
@@ -27907,6 +29691,16 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ DescriptorSetLayoutBindingFlagsCreateInfo & setBindingFlags( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags> const & bindingFlags_ ) VULKAN_HPP_NOEXCEPT
+ {
+ bindingCount = static_cast<uint32_t>( bindingFlags_.size() );
+ pBindingFlags = bindingFlags_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
operator VkDescriptorSetLayoutBindingFlagsCreateInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDescriptorSetLayoutBindingFlagsCreateInfo*>( this );
@@ -27917,6 +29711,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkDescriptorSetLayoutBindingFlagsCreateInfo*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DescriptorSetLayoutBindingFlagsCreateInfo const& ) const = default;
+#else
bool operator==( DescriptorSetLayoutBindingFlagsCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -27929,40 +29727,60 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo;
const void* pNext = {};
uint32_t bindingCount = {};
const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags* pBindingFlags = {};
+
};
static_assert( sizeof( DescriptorSetLayoutBindingFlagsCreateInfo ) == sizeof( VkDescriptorSetLayoutBindingFlagsCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DescriptorSetLayoutBindingFlagsCreateInfo>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo>
+ {
+ using Type = DescriptorSetLayoutBindingFlagsCreateInfo;
+ };
+ using DescriptorSetLayoutBindingFlagsCreateInfoEXT = DescriptorSetLayoutBindingFlagsCreateInfo;
+
struct DescriptorSetLayoutCreateInfo
{
- VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ = {},
- uint32_t bindingCount_ = {},
- const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding* pBindings_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , bindingCount( bindingCount_ )
- , pBindings( pBindings_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo(VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ = {}, uint32_t bindingCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding* pBindings_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), bindingCount( bindingCount_ ), pBindings( pBindings_ )
{}
- VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & operator=( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo ) - offsetof( DescriptorSetLayoutCreateInfo, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo( DescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DescriptorSetLayoutCreateInfo( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- DescriptorSetLayoutCreateInfo& operator=( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ DescriptorSetLayoutCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding> const & bindings_ )
+ : flags( flags_ ), bindingCount( static_cast<uint32_t>( bindings_.size() ) ), pBindings( bindings_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ DescriptorSetLayoutCreateInfo & operator=( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const *>( &rhs );
+ return *this;
+ }
+
+ DescriptorSetLayoutCreateInfo & operator=( DescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DescriptorSetLayoutCreateInfo ) );
return *this;
}
@@ -27990,6 +29808,16 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ DescriptorSetLayoutCreateInfo & setBindings( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding> const & bindings_ ) VULKAN_HPP_NOEXCEPT
+ {
+ bindingCount = static_cast<uint32_t>( bindings_.size() );
+ pBindings = bindings_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
operator VkDescriptorSetLayoutCreateInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( this );
@@ -28000,6 +29828,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkDescriptorSetLayoutCreateInfo*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DescriptorSetLayoutCreateInfo const& ) const = default;
+#else
bool operator==( DescriptorSetLayoutCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -28013,6 +29845,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutCreateInfo;
@@ -28020,33 +29855,48 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags = {};
uint32_t bindingCount = {};
const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding* pBindings = {};
+
};
static_assert( sizeof( DescriptorSetLayoutCreateInfo ) == sizeof( VkDescriptorSetLayoutCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DescriptorSetLayoutCreateInfo>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eDescriptorSetLayoutCreateInfo>
+ {
+ using Type = DescriptorSetLayoutCreateInfo;
+ };
+
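Note: combining DescriptorSetLayoutBinding with the new bindings overload of DescriptorSetLayoutCreateInfo, a minimal sketch (illustrative only; again assumes an existing vk::Device):

    #include <array>
    #include <vulkan/vulkan.hpp>

    vk::DescriptorSetLayout makeLayout( vk::Device device )
    {
      std::array<vk::DescriptorSetLayoutBinding, 2> bindings = {
        // binding, descriptor type, descriptor count, stage flags
        vk::DescriptorSetLayoutBinding( 0, vk::DescriptorType::eUniformBuffer, 1,
                                        vk::ShaderStageFlagBits::eVertex ),
        vk::DescriptorSetLayoutBinding( 1, vk::DescriptorType::eCombinedImageSampler, 1,
                                        vk::ShaderStageFlagBits::eFragment )
      };
      // bindingCount and pBindings are derived from the proxy.
      vk::DescriptorSetLayoutCreateInfo createInfo( {}, bindings );
      return device.createDescriptorSetLayout( createInfo );
    }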
struct DescriptorSetLayoutSupport
{
- DescriptorSetLayoutSupport( VULKAN_HPP_NAMESPACE::Bool32 supported_ = {} ) VULKAN_HPP_NOEXCEPT
- : supported( supported_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutSupport;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DescriptorSetLayoutSupport(VULKAN_HPP_NAMESPACE::Bool32 supported_ = {}) VULKAN_HPP_NOEXCEPT
+ : supported( supported_ )
{}
- VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & operator=( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport ) - offsetof( DescriptorSetLayoutSupport, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR DescriptorSetLayoutSupport( DescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DescriptorSetLayoutSupport( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ DescriptorSetLayoutSupport & operator=( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport const *>( &rhs );
+ return *this;
+ }
- DescriptorSetLayoutSupport& operator=( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
+ DescriptorSetLayoutSupport & operator=( DescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DescriptorSetLayoutSupport ) );
return *this;
}
+
operator VkDescriptorSetLayoutSupport const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDescriptorSetLayoutSupport*>( this );
@@ -28057,6 +29907,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkDescriptorSetLayoutSupport*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DescriptorSetLayoutSupport const& ) const = default;
+#else
bool operator==( DescriptorSetLayoutSupport const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -28068,37 +29922,59 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutSupport;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 supported = {};
+
};
static_assert( sizeof( DescriptorSetLayoutSupport ) == sizeof( VkDescriptorSetLayoutSupport ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DescriptorSetLayoutSupport>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eDescriptorSetLayoutSupport>
+ {
+ using Type = DescriptorSetLayoutSupport;
+ };
+ using DescriptorSetLayoutSupportKHR = DescriptorSetLayoutSupport;
+
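Note: DescriptorSetLayoutSupport is an output structure, normally filled by the implementation rather than constructed by hand. A small sketch of the enhanced-mode query (illustrative only; assumes exceptions-based error handling):

    #include <vulkan/vulkan.hpp>

    bool layoutSupported( vk::Device device, vk::DescriptorSetLayoutCreateInfo const & createInfo )
    {
      // Asks the implementation whether the layout could be created at all.
      vk::DescriptorSetLayoutSupport support = device.getDescriptorSetLayoutSupport( createInfo );
      return support.supported == VK_TRUE;
    }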
struct DescriptorSetVariableDescriptorCountAllocateInfo
{
- VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountAllocateInfo( uint32_t descriptorSetCount_ = {},
- const uint32_t* pDescriptorCounts_ = {} ) VULKAN_HPP_NOEXCEPT
- : descriptorSetCount( descriptorSetCount_ )
- , pDescriptorCounts( pDescriptorCounts_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountAllocateInfo(uint32_t descriptorSetCount_ = {}, const uint32_t* pDescriptorCounts_ = {}) VULKAN_HPP_NOEXCEPT
+ : descriptorSetCount( descriptorSetCount_ ), pDescriptorCounts( pDescriptorCounts_ )
{}
- VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo & operator=( VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo ) - offsetof( DescriptorSetVariableDescriptorCountAllocateInfo, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountAllocateInfo( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DescriptorSetVariableDescriptorCountAllocateInfo( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- DescriptorSetVariableDescriptorCountAllocateInfo& operator=( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ DescriptorSetVariableDescriptorCountAllocateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & descriptorCounts_ )
+ : descriptorSetCount( static_cast<uint32_t>( descriptorCounts_.size() ) ), pDescriptorCounts( descriptorCounts_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ DescriptorSetVariableDescriptorCountAllocateInfo & operator=( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo const *>( &rhs );
+ return *this;
+ }
+
+ DescriptorSetVariableDescriptorCountAllocateInfo & operator=( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DescriptorSetVariableDescriptorCountAllocateInfo ) );
return *this;
}
@@ -28120,6 +29996,16 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ DescriptorSetVariableDescriptorCountAllocateInfo & setDescriptorCounts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & descriptorCounts_ ) VULKAN_HPP_NOEXCEPT
+ {
+ descriptorSetCount = static_cast<uint32_t>( descriptorCounts_.size() );
+ pDescriptorCounts = descriptorCounts_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
operator VkDescriptorSetVariableDescriptorCountAllocateInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDescriptorSetVariableDescriptorCountAllocateInfo*>( this );
@@ -28130,6 +30016,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkDescriptorSetVariableDescriptorCountAllocateInfo*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DescriptorSetVariableDescriptorCountAllocateInfo const& ) const = default;
+#else
bool operator==( DescriptorSetVariableDescriptorCountAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -28142,39 +30032,58 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo;
const void* pNext = {};
uint32_t descriptorSetCount = {};
const uint32_t* pDescriptorCounts = {};
+
};
static_assert( sizeof( DescriptorSetVariableDescriptorCountAllocateInfo ) == sizeof( VkDescriptorSetVariableDescriptorCountAllocateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DescriptorSetVariableDescriptorCountAllocateInfo>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo>
+ {
+ using Type = DescriptorSetVariableDescriptorCountAllocateInfo;
+ };
+ using DescriptorSetVariableDescriptorCountAllocateInfoEXT = DescriptorSetVariableDescriptorCountAllocateInfo;
+
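Note: DescriptorSetVariableDescriptorCountAllocateInfo is meant to be chained into a DescriptorSetAllocateInfo via pNext. A small sketch using the generated setters (hypothetical helper, illustrative only; the caller must keep both structs and the counts array alive until the allocation call):

    #include <cstdint>
    #include <vulkan/vulkan.hpp>

    void chainVariableCounts( vk::DescriptorSetAllocateInfo & allocInfo,
                              vk::DescriptorSetVariableDescriptorCountAllocateInfo & countInfo,
                              uint32_t const * counts, uint32_t setCount )
    {
      // One count per set being allocated, applied to each set's variable-sized binding.
      countInfo.setDescriptorSetCount( setCount ).setPDescriptorCounts( counts );
      allocInfo.setPNext( &countInfo );
    }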
struct DescriptorSetVariableDescriptorCountLayoutSupport
{
- DescriptorSetVariableDescriptorCountLayoutSupport( uint32_t maxVariableDescriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT
- : maxVariableDescriptorCount( maxVariableDescriptorCount_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountLayoutSupport(uint32_t maxVariableDescriptorCount_ = {}) VULKAN_HPP_NOEXCEPT
+ : maxVariableDescriptorCount( maxVariableDescriptorCount_ )
{}
- VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport & operator=( VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport ) - offsetof( DescriptorSetVariableDescriptorCountLayoutSupport, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountLayoutSupport( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DescriptorSetVariableDescriptorCountLayoutSupport( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- DescriptorSetVariableDescriptorCountLayoutSupport& operator=( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
+ DescriptorSetVariableDescriptorCountLayoutSupport & operator=( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport const *>( &rhs );
return *this;
}
+ DescriptorSetVariableDescriptorCountLayoutSupport & operator=( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DescriptorSetVariableDescriptorCountLayoutSupport ) );
+ return *this;
+ }
+
+
operator VkDescriptorSetVariableDescriptorCountLayoutSupport const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDescriptorSetVariableDescriptorCountLayoutSupport*>( this );
@@ -28185,6 +30094,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkDescriptorSetVariableDescriptorCountLayoutSupport*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DescriptorSetVariableDescriptorCountLayoutSupport const& ) const = default;
+#else
bool operator==( DescriptorSetVariableDescriptorCountLayoutSupport const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -28196,39 +30109,52 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport;
void* pNext = {};
uint32_t maxVariableDescriptorCount = {};
+
};
static_assert( sizeof( DescriptorSetVariableDescriptorCountLayoutSupport ) == sizeof( VkDescriptorSetVariableDescriptorCountLayoutSupport ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DescriptorSetVariableDescriptorCountLayoutSupport>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport>
+ {
+ using Type = DescriptorSetVariableDescriptorCountLayoutSupport;
+ };
+ using DescriptorSetVariableDescriptorCountLayoutSupportEXT = DescriptorSetVariableDescriptorCountLayoutSupport;
+
struct DescriptorUpdateTemplateEntry
{
- VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateEntry( uint32_t dstBinding_ = {},
- uint32_t dstArrayElement_ = {},
- uint32_t descriptorCount_ = {},
- VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler,
- size_t offset_ = {},
- size_t stride_ = {} ) VULKAN_HPP_NOEXCEPT
- : dstBinding( dstBinding_ )
- , dstArrayElement( dstArrayElement_ )
- , descriptorCount( descriptorCount_ )
- , descriptorType( descriptorType_ )
- , offset( offset_ )
- , stride( stride_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateEntry(uint32_t dstBinding_ = {}, uint32_t dstArrayElement_ = {}, uint32_t descriptorCount_ = {}, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, size_t offset_ = {}, size_t stride_ = {}) VULKAN_HPP_NOEXCEPT
+ : dstBinding( dstBinding_ ), dstArrayElement( dstArrayElement_ ), descriptorCount( descriptorCount_ ), descriptorType( descriptorType_ ), offset( offset_ ), stride( stride_ )
{}
+ VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateEntry( DescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
DescriptorUpdateTemplateEntry( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ DescriptorUpdateTemplateEntry & operator=( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry const *>( &rhs );
+ return *this;
+ }
- DescriptorUpdateTemplateEntry& operator=( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT
+ DescriptorUpdateTemplateEntry & operator=( DescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DescriptorUpdateTemplateEntry ) );
return *this;
}
@@ -28268,6 +30194,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkDescriptorUpdateTemplateEntry const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDescriptorUpdateTemplateEntry*>( this );
@@ -28278,6 +30205,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkDescriptorUpdateTemplateEntry*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DescriptorUpdateTemplateEntry const& ) const = default;
+#else
bool operator==( DescriptorUpdateTemplateEntry const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( dstBinding == rhs.dstBinding )
@@ -28292,6 +30223,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
uint32_t dstBinding = {};
@@ -28300,44 +30234,45 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
size_t offset = {};
size_t stride = {};
+
};
static_assert( sizeof( DescriptorUpdateTemplateEntry ) == sizeof( VkDescriptorUpdateTemplateEntry ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DescriptorUpdateTemplateEntry>::value, "struct wrapper is not a standard layout!" );
+ using DescriptorUpdateTemplateEntryKHR = DescriptorUpdateTemplateEntry;
struct DescriptorUpdateTemplateCreateInfo
{
- VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_ = {},
- uint32_t descriptorUpdateEntryCount_ = {},
- const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry* pDescriptorUpdateEntries_ = {},
- VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet,
- VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {},
- VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics,
- VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {},
- uint32_t set_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , descriptorUpdateEntryCount( descriptorUpdateEntryCount_ )
- , pDescriptorUpdateEntries( pDescriptorUpdateEntries_ )
- , templateType( templateType_ )
- , descriptorSetLayout( descriptorSetLayout_ )
- , pipelineBindPoint( pipelineBindPoint_ )
- , pipelineLayout( pipelineLayout_ )
- , set( set_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorUpdateTemplateCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_ = {}, uint32_t descriptorUpdateEntryCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry* pDescriptorUpdateEntries_ = {}, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet, VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {}, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, uint32_t set_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), descriptorUpdateEntryCount( descriptorUpdateEntryCount_ ), pDescriptorUpdateEntries( pDescriptorUpdateEntries_ ), templateType( templateType_ ), descriptorSetLayout( descriptorSetLayout_ ), pipelineBindPoint( pipelineBindPoint_ ), pipelineLayout( pipelineLayout_ ), set( set_ )
{}
- VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & operator=( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo ) - offsetof( DescriptorUpdateTemplateCreateInfo, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo( DescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DescriptorUpdateTemplateCreateInfo( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- DescriptorUpdateTemplateCreateInfo& operator=( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ DescriptorUpdateTemplateCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry> const & descriptorUpdateEntries_, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet, VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {}, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, uint32_t set_ = {} )
+ : flags( flags_ ), descriptorUpdateEntryCount( static_cast<uint32_t>( descriptorUpdateEntries_.size() ) ), pDescriptorUpdateEntries( descriptorUpdateEntries_.data() ), templateType( templateType_ ), descriptorSetLayout( descriptorSetLayout_ ), pipelineBindPoint( pipelineBindPoint_ ), pipelineLayout( pipelineLayout_ ), set( set_ )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ DescriptorUpdateTemplateCreateInfo & operator=( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const *>( &rhs );
+ return *this;
+ }
+
+ DescriptorUpdateTemplateCreateInfo & operator=( DescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DescriptorUpdateTemplateCreateInfo ) );
return *this;
}
@@ -28365,6 +30300,15 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ DescriptorUpdateTemplateCreateInfo & setDescriptorUpdateEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry> const & descriptorUpdateEntries_ ) VULKAN_HPP_NOEXCEPT
+ {
+ descriptorUpdateEntryCount = static_cast<uint32_t>( descriptorUpdateEntries_.size() );
+ pDescriptorUpdateEntries = descriptorUpdateEntries_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
DescriptorUpdateTemplateCreateInfo & setTemplateType( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ ) VULKAN_HPP_NOEXCEPT
{
templateType = templateType_;
@@ -28395,6 +30339,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkDescriptorUpdateTemplateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo*>( this );
@@ -28405,6 +30350,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkDescriptorUpdateTemplateCreateInfo*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DescriptorUpdateTemplateCreateInfo const& ) const = default;
+#else
bool operator==( DescriptorUpdateTemplateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -28423,6 +30372,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorUpdateTemplateCreateInfo;
@@ -28435,36 +30387,51 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout = {};
uint32_t set = {};
+
};
static_assert( sizeof( DescriptorUpdateTemplateCreateInfo ) == sizeof( VkDescriptorUpdateTemplateCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DescriptorUpdateTemplateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eDescriptorUpdateTemplateCreateInfo>
+ {
+ using Type = DescriptorUpdateTemplateCreateInfo;
+ };
+ using DescriptorUpdateTemplateCreateInfoKHR = DescriptorUpdateTemplateCreateInfo;
+
struct DeviceQueueCreateInfo
{
- VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {},
- uint32_t queueFamilyIndex_ = {},
- uint32_t queueCount_ = {},
- const float* pQueuePriorities_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , queueFamilyIndex( queueFamilyIndex_ )
- , queueCount( queueCount_ )
- , pQueuePriorities( pQueuePriorities_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo(VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {}, uint32_t queueFamilyIndex_ = {}, uint32_t queueCount_ = {}, const float* pQueuePriorities_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), queueFamilyIndex( queueFamilyIndex_ ), queueCount( queueCount_ ), pQueuePriorities( pQueuePriorities_ )
{}
- VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo & operator=( VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo ) - offsetof( DeviceQueueCreateInfo, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo( DeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceQueueCreateInfo( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- DeviceQueueCreateInfo& operator=( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ DeviceQueueCreateInfo( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_, uint32_t queueFamilyIndex_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const float> const & queuePriorities_ )
+ : flags( flags_ ), queueFamilyIndex( queueFamilyIndex_ ), queueCount( static_cast<uint32_t>( queuePriorities_.size() ) ), pQueuePriorities( queuePriorities_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ DeviceQueueCreateInfo & operator=( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo const *>( &rhs );
+ return *this;
+ }
+
+ DeviceQueueCreateInfo & operator=( DeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DeviceQueueCreateInfo ) );
return *this;
}
@@ -28498,6 +30465,16 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ DeviceQueueCreateInfo & setQueuePriorities( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const float> const & queuePriorities_ ) VULKAN_HPP_NOEXCEPT
+ {
+ queueCount = static_cast<uint32_t>( queuePriorities_.size() );
+ pQueuePriorities = queuePriorities_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
operator VkDeviceQueueCreateInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceQueueCreateInfo*>( this );
@@ -28508,6 +30485,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkDeviceQueueCreateInfo*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DeviceQueueCreateInfo const& ) const = default;
+#else
bool operator==( DeviceQueueCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -28522,6 +30503,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueCreateInfo;
@@ -28530,132 +30514,43 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t queueFamilyIndex = {};
uint32_t queueCount = {};
const float* pQueuePriorities = {};
+
};
static_assert( sizeof( DeviceQueueCreateInfo ) == sizeof( VkDeviceQueueCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DeviceQueueCreateInfo>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eDeviceQueueCreateInfo>
+ {
+ using Type = DeviceQueueCreateInfo;
+ };
+
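
Illustrative sketch (not part of the patch): the new enhanced-mode DeviceQueueCreateInfo constructor and setQueuePriorities overload take an ArrayProxyNoTemporaries, so queueCount is derived from the container instead of being passed separately. Assumes the default vk namespace and enhanced mode left enabled.

#include <array>
#include <vulkan/vulkan.hpp>

// The priorities array must outlive the returned struct, since pQueuePriorities points into it.
vk::DeviceQueueCreateInfo makeQueueInfo( uint32_t queueFamilyIndex,
                                         std::array<float, 2> const & priorities )
{
    // queueCount is filled in from priorities.size() by the ArrayProxyNoTemporaries overload.
    return vk::DeviceQueueCreateInfo( vk::DeviceQueueCreateFlags(), queueFamilyIndex, priorities );
}
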
struct PhysicalDeviceFeatures
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 independentBlend_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 geometryShader_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 logicOp_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 depthClamp_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 depthBounds_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 wideLines_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 largePoints_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 multiViewport_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_ = {} ) VULKAN_HPP_NOEXCEPT
- : robustBufferAccess( robustBufferAccess_ )
- , fullDrawIndexUint32( fullDrawIndexUint32_ )
- , imageCubeArray( imageCubeArray_ )
- , independentBlend( independentBlend_ )
- , geometryShader( geometryShader_ )
- , tessellationShader( tessellationShader_ )
- , sampleRateShading( sampleRateShading_ )
- , dualSrcBlend( dualSrcBlend_ )
- , logicOp( logicOp_ )
- , multiDrawIndirect( multiDrawIndirect_ )
- , drawIndirectFirstInstance( drawIndirectFirstInstance_ )
- , depthClamp( depthClamp_ )
- , depthBiasClamp( depthBiasClamp_ )
- , fillModeNonSolid( fillModeNonSolid_ )
- , depthBounds( depthBounds_ )
- , wideLines( wideLines_ )
- , largePoints( largePoints_ )
- , alphaToOne( alphaToOne_ )
- , multiViewport( multiViewport_ )
- , samplerAnisotropy( samplerAnisotropy_ )
- , textureCompressionETC2( textureCompressionETC2_ )
- , textureCompressionASTC_LDR( textureCompressionASTC_LDR_ )
- , textureCompressionBC( textureCompressionBC_ )
- , occlusionQueryPrecise( occlusionQueryPrecise_ )
- , pipelineStatisticsQuery( pipelineStatisticsQuery_ )
- , vertexPipelineStoresAndAtomics( vertexPipelineStoresAndAtomics_ )
- , fragmentStoresAndAtomics( fragmentStoresAndAtomics_ )
- , shaderTessellationAndGeometryPointSize( shaderTessellationAndGeometryPointSize_ )
- , shaderImageGatherExtended( shaderImageGatherExtended_ )
- , shaderStorageImageExtendedFormats( shaderStorageImageExtendedFormats_ )
- , shaderStorageImageMultisample( shaderStorageImageMultisample_ )
- , shaderStorageImageReadWithoutFormat( shaderStorageImageReadWithoutFormat_ )
- , shaderStorageImageWriteWithoutFormat( shaderStorageImageWriteWithoutFormat_ )
- , shaderUniformBufferArrayDynamicIndexing( shaderUniformBufferArrayDynamicIndexing_ )
- , shaderSampledImageArrayDynamicIndexing( shaderSampledImageArrayDynamicIndexing_ )
- , shaderStorageBufferArrayDynamicIndexing( shaderStorageBufferArrayDynamicIndexing_ )
- , shaderStorageImageArrayDynamicIndexing( shaderStorageImageArrayDynamicIndexing_ )
- , shaderClipDistance( shaderClipDistance_ )
- , shaderCullDistance( shaderCullDistance_ )
- , shaderFloat64( shaderFloat64_ )
- , shaderInt64( shaderInt64_ )
- , shaderInt16( shaderInt16_ )
- , shaderResourceResidency( shaderResourceResidency_ )
- , shaderResourceMinLod( shaderResourceMinLod_ )
- , sparseBinding( sparseBinding_ )
- , sparseResidencyBuffer( sparseResidencyBuffer_ )
- , sparseResidencyImage2D( sparseResidencyImage2D_ )
- , sparseResidencyImage3D( sparseResidencyImage3D_ )
- , sparseResidency2Samples( sparseResidency2Samples_ )
- , sparseResidency4Samples( sparseResidency4Samples_ )
- , sparseResidency8Samples( sparseResidency8Samples_ )
- , sparseResidency16Samples( sparseResidency16Samples_ )
- , sparseResidencyAliased( sparseResidencyAliased_ )
- , variableMultisampleRate( variableMultisampleRate_ )
- , inheritedQueries( inheritedQueries_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures(VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_ = {}, VULKAN_HPP_NAMESPACE::Bool32 independentBlend_ = {}, VULKAN_HPP_NAMESPACE::Bool32 geometryShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_ = {}, VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_ = {}, VULKAN_HPP_NAMESPACE::Bool32 logicOp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_ = {}, VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthClamp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthBounds_ = {}, VULKAN_HPP_NAMESPACE::Bool32 wideLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 largePoints_ = {}, VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiViewport_ = {}, VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_ = {}, VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ = {}, VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ = {}, VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ = {}, VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ = {}, VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ = {}, VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ = {}, VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_ = {}) VULKAN_HPP_NOEXCEPT
+ : robustBufferAccess( robustBufferAccess_ ), fullDrawIndexUint32( fullDrawIndexUint32_ ), imageCubeArray( imageCubeArray_ ), independentBlend( independentBlend_ ), geometryShader( geometryShader_ ), tessellationShader( tessellationShader_ ), sampleRateShading( sampleRateShading_ ), dualSrcBlend( dualSrcBlend_ ), logicOp( logicOp_ ), multiDrawIndirect( multiDrawIndirect_ ), drawIndirectFirstInstance( drawIndirectFirstInstance_ ), depthClamp( depthClamp_ ), depthBiasClamp( depthBiasClamp_ ), fillModeNonSolid( fillModeNonSolid_ ), depthBounds( depthBounds_ ), wideLines( wideLines_ ), largePoints( largePoints_ ), alphaToOne( alphaToOne_ ), multiViewport( multiViewport_ ), samplerAnisotropy( samplerAnisotropy_ ), textureCompressionETC2( textureCompressionETC2_ ), textureCompressionASTC_LDR( textureCompressionASTC_LDR_ ), textureCompressionBC( textureCompressionBC_ ), occlusionQueryPrecise( occlusionQueryPrecise_ ), pipelineStatisticsQuery( pipelineStatisticsQuery_ ), vertexPipelineStoresAndAtomics( vertexPipelineStoresAndAtomics_ ), fragmentStoresAndAtomics( fragmentStoresAndAtomics_ ), shaderTessellationAndGeometryPointSize( shaderTessellationAndGeometryPointSize_ ), shaderImageGatherExtended( shaderImageGatherExtended_ ), shaderStorageImageExtendedFormats( shaderStorageImageExtendedFormats_ ), shaderStorageImageMultisample( shaderStorageImageMultisample_ ), shaderStorageImageReadWithoutFormat( shaderStorageImageReadWithoutFormat_ ), shaderStorageImageWriteWithoutFormat( shaderStorageImageWriteWithoutFormat_ ), shaderUniformBufferArrayDynamicIndexing( shaderUniformBufferArrayDynamicIndexing_ ), shaderSampledImageArrayDynamicIndexing( shaderSampledImageArrayDynamicIndexing_ ), shaderStorageBufferArrayDynamicIndexing( shaderStorageBufferArrayDynamicIndexing_ ), shaderStorageImageArrayDynamicIndexing( shaderStorageImageArrayDynamicIndexing_ ), shaderClipDistance( shaderClipDistance_ ), shaderCullDistance( shaderCullDistance_ ), shaderFloat64( shaderFloat64_ ), shaderInt64( shaderInt64_ ), shaderInt16( shaderInt16_ ), shaderResourceResidency( shaderResourceResidency_ ), shaderResourceMinLod( shaderResourceMinLod_ ), sparseBinding( sparseBinding_ ), sparseResidencyBuffer( sparseResidencyBuffer_ ), sparseResidencyImage2D( sparseResidencyImage2D_ ), sparseResidencyImage3D( sparseResidencyImage3D_ ), sparseResidency2Samples( sparseResidency2Samples_ ), sparseResidency4Samples( sparseResidency4Samples_ ), sparseResidency8Samples( sparseResidency8Samples_ ), sparseResidency16Samples( sparseResidency16Samples_ ), sparseResidencyAliased( sparseResidencyAliased_ ), variableMultisampleRate( variableMultisampleRate_ ), inheritedQueries( inheritedQueries_ )
{}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures( PhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
PhysicalDeviceFeatures( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceFeatures& operator=( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & operator=( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures const *>( &rhs );
+ return *this;
+ }
+
+ PhysicalDeviceFeatures & operator=( PhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceFeatures ) );
return *this;
}
@@ -28989,6 +30884,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceFeatures const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceFeatures*>( this );
@@ -28999,6 +30895,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceFeatures*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceFeatures const& ) const = default;
+#else
bool operator==( PhysicalDeviceFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( robustBufferAccess == rhs.robustBufferAccess )
@@ -29062,6 +30962,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess = {};
@@ -29119,44 +31022,44 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased = {};
VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate = {};
VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries = {};
+
};
static_assert( sizeof( PhysicalDeviceFeatures ) == sizeof( VkPhysicalDeviceFeatures ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceFeatures>::value, "struct wrapper is not a standard layout!" );
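
Illustrative sketch (not part of the patch): PhysicalDeviceFeatures stays a plain aggregate of Bool32 members; its chained setters (generated in the elided hunk above) return *this, so a feature selection can be written fluently and handed to DeviceCreateInfo::setPEnabledFeatures.

#include <vulkan/vulkan.hpp>

vk::PhysicalDeviceFeatures selectFeatures()
{
    // Each setter returns *this; members that are never set stay value-initialized (VK_FALSE).
    return vk::PhysicalDeviceFeatures()
        .setSamplerAnisotropy( VK_TRUE )
        .setFillModeNonSolid( VK_TRUE );
}
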
struct DeviceCreateInfo
{
- VULKAN_HPP_CONSTEXPR DeviceCreateInfo( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_ = {},
- uint32_t queueCreateInfoCount_ = {},
- const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo* pQueueCreateInfos_ = {},
- uint32_t enabledLayerCount_ = {},
- const char* const* ppEnabledLayerNames_ = {},
- uint32_t enabledExtensionCount_ = {},
- const char* const* ppEnabledExtensionNames_ = {},
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pEnabledFeatures_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , queueCreateInfoCount( queueCreateInfoCount_ )
- , pQueueCreateInfos( pQueueCreateInfos_ )
- , enabledLayerCount( enabledLayerCount_ )
- , ppEnabledLayerNames( ppEnabledLayerNames_ )
- , enabledExtensionCount( enabledExtensionCount_ )
- , ppEnabledExtensionNames( ppEnabledExtensionNames_ )
- , pEnabledFeatures( pEnabledFeatures_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DeviceCreateInfo(VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_ = {}, uint32_t queueCreateInfoCount_ = {}, const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo* pQueueCreateInfos_ = {}, uint32_t enabledLayerCount_ = {}, const char* const * ppEnabledLayerNames_ = {}, uint32_t enabledExtensionCount_ = {}, const char* const * ppEnabledExtensionNames_ = {}, const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pEnabledFeatures_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), queueCreateInfoCount( queueCreateInfoCount_ ), pQueueCreateInfos( pQueueCreateInfos_ ), enabledLayerCount( enabledLayerCount_ ), ppEnabledLayerNames( ppEnabledLayerNames_ ), enabledExtensionCount( enabledExtensionCount_ ), ppEnabledExtensionNames( ppEnabledExtensionNames_ ), pEnabledFeatures( pEnabledFeatures_ )
{}
- VULKAN_HPP_NAMESPACE::DeviceCreateInfo & operator=( VULKAN_HPP_NAMESPACE::DeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceCreateInfo ) - offsetof( DeviceCreateInfo, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR DeviceCreateInfo( DeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceCreateInfo( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- DeviceCreateInfo& operator=( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ DeviceCreateInfo( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo> const & queueCreateInfos_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char* const > const & pEnabledLayerNames_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char* const > const & pEnabledExtensionNames_ = {}, const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pEnabledFeatures_ = {} )
+ : flags( flags_ ), queueCreateInfoCount( static_cast<uint32_t>( queueCreateInfos_.size() ) ), pQueueCreateInfos( queueCreateInfos_.data() ), enabledLayerCount( static_cast<uint32_t>( pEnabledLayerNames_.size() ) ), ppEnabledLayerNames( pEnabledLayerNames_.data() ), enabledExtensionCount( static_cast<uint32_t>( pEnabledExtensionNames_.size() ) ), ppEnabledExtensionNames( pEnabledExtensionNames_.data() ), pEnabledFeatures( pEnabledFeatures_ )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ DeviceCreateInfo & operator=( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceCreateInfo const *>( &rhs );
+ return *this;
+ }
+
+ DeviceCreateInfo & operator=( DeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceCreateInfo const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DeviceCreateInfo ) );
return *this;
}
@@ -29184,36 +31087,64 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ DeviceCreateInfo & setQueueCreateInfos( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo> const & queueCreateInfos_ ) VULKAN_HPP_NOEXCEPT
+ {
+ queueCreateInfoCount = static_cast<uint32_t>( queueCreateInfos_.size() );
+ pQueueCreateInfos = queueCreateInfos_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
DeviceCreateInfo & setEnabledLayerCount( uint32_t enabledLayerCount_ ) VULKAN_HPP_NOEXCEPT
{
enabledLayerCount = enabledLayerCount_;
return *this;
}
- DeviceCreateInfo & setPpEnabledLayerNames( const char* const* ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
+ DeviceCreateInfo & setPpEnabledLayerNames( const char* const * ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
{
ppEnabledLayerNames = ppEnabledLayerNames_;
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ DeviceCreateInfo & setPEnabledLayerNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char* const > const & pEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
+ {
+ enabledLayerCount = static_cast<uint32_t>( pEnabledLayerNames_.size() );
+ ppEnabledLayerNames = pEnabledLayerNames_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
DeviceCreateInfo & setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) VULKAN_HPP_NOEXCEPT
{
enabledExtensionCount = enabledExtensionCount_;
return *this;
}
- DeviceCreateInfo & setPpEnabledExtensionNames( const char* const* ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
+ DeviceCreateInfo & setPpEnabledExtensionNames( const char* const * ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
{
ppEnabledExtensionNames = ppEnabledExtensionNames_;
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ DeviceCreateInfo & setPEnabledExtensionNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char* const > const & pEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
+ {
+ enabledExtensionCount = static_cast<uint32_t>( pEnabledExtensionNames_.size() );
+ ppEnabledExtensionNames = pEnabledExtensionNames_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
DeviceCreateInfo & setPEnabledFeatures( const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pEnabledFeatures_ ) VULKAN_HPP_NOEXCEPT
{
pEnabledFeatures = pEnabledFeatures_;
return *this;
}
+
operator VkDeviceCreateInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceCreateInfo*>( this );
@@ -29224,6 +31155,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkDeviceCreateInfo*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DeviceCreateInfo const& ) const = default;
+#else
bool operator==( DeviceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -29242,6 +31177,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceCreateInfo;
@@ -29250,34 +31188,135 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t queueCreateInfoCount = {};
const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo* pQueueCreateInfos = {};
uint32_t enabledLayerCount = {};
- const char* const* ppEnabledLayerNames = {};
+ const char* const * ppEnabledLayerNames = {};
uint32_t enabledExtensionCount = {};
- const char* const* ppEnabledExtensionNames = {};
+ const char* const * ppEnabledExtensionNames = {};
const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pEnabledFeatures = {};
+
};
static_assert( sizeof( DeviceCreateInfo ) == sizeof( VkDeviceCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DeviceCreateInfo>::value, "struct wrapper is not a standard layout!" );
- struct DeviceEventInfoEXT
+ template <>
+ struct CppType<StructureType, StructureType::eDeviceCreateInfo>
{
- VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT( VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug ) VULKAN_HPP_NOEXCEPT
- : deviceEvent( deviceEvent_ )
+ using Type = DeviceCreateInfo;
+ };
+
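
Illustrative sketch (not part of the patch): with the array-proxy DeviceCreateInfo constructor, queueCreateInfoCount and enabledExtensionCount are derived from the containers, so the count/pointer pairs cannot drift apart. Assumes the default dispatcher and exception-based error handling; the extension name is only an example.

#include <vector>
#include <vulkan/vulkan.hpp>

vk::Device createLogicalDevice( vk::PhysicalDevice physicalDevice, uint32_t queueFamilyIndex )
{
    float priority = 1.0f;
    vk::DeviceQueueCreateInfo queueInfo( {}, queueFamilyIndex, 1, &priority );

    std::vector<const char *> extensions = { VK_KHR_SWAPCHAIN_EXTENSION_NAME };

    // A single lvalue or a container binds to ArrayProxyNoTemporaries; the empty {}
    // leaves ppEnabledLayerNames null with enabledLayerCount == 0.
    vk::DeviceCreateInfo createInfo( {}, queueInfo, {}, extensions );
    return physicalDevice.createDevice( createInfo );
}
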
+ struct DeviceDiagnosticsConfigCreateInfoNV
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceDiagnosticsConfigCreateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigCreateInfoNV(VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
{}
- VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & operator=( VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigCreateInfoNV( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DeviceDiagnosticsConfigCreateInfoNV( VkDeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ DeviceDiagnosticsConfigCreateInfoNV & operator=( VkDeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT ) - offsetof( DeviceEventInfoEXT, pNext ) );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV const *>( &rhs );
return *this;
}
+ DeviceDiagnosticsConfigCreateInfoNV & operator=( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DeviceDiagnosticsConfigCreateInfoNV ) );
+ return *this;
+ }
+
+ DeviceDiagnosticsConfigCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ DeviceDiagnosticsConfigCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+ {
+ flags = flags_;
+ return *this;
+ }
+
+
+ operator VkDeviceDiagnosticsConfigCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkDeviceDiagnosticsConfigCreateInfoNV*>( this );
+ }
+
+ operator VkDeviceDiagnosticsConfigCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkDeviceDiagnosticsConfigCreateInfoNV*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DeviceDiagnosticsConfigCreateInfoNV const& ) const = default;
+#else
+ bool operator==( DeviceDiagnosticsConfigCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags );
+ }
+
+ bool operator!=( DeviceDiagnosticsConfigCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceDiagnosticsConfigCreateInfoNV;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags = {};
+
+ };
+ static_assert( sizeof( DeviceDiagnosticsConfigCreateInfoNV ) == sizeof( VkDeviceDiagnosticsConfigCreateInfoNV ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<DeviceDiagnosticsConfigCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eDeviceDiagnosticsConfigCreateInfoNV>
+ {
+ using Type = DeviceDiagnosticsConfigCreateInfoNV;
+ };
+
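
Illustrative sketch (not part of the patch): the new DeviceDiagnosticsConfigCreateInfoNV extends VkDeviceCreateInfo through pNext. The flag bit below is the name generated from VK_NV_device_diagnostics_config and assumes that extension is enabled on the device.

#include <vulkan/vulkan.hpp>

void chainDiagnostics( vk::DeviceCreateInfo & createInfo,
                       vk::DeviceDiagnosticsConfigCreateInfoNV & diagnostics )
{
    // Request shader debug info from the NV diagnostics-config extension (assumed available).
    diagnostics.setFlags( vk::DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo );
    createInfo.setPNext( &diagnostics );  // diagnostics must outlive createInfo's use
}
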
+ struct DeviceEventInfoEXT
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceEventInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT(VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug) VULKAN_HPP_NOEXCEPT
+ : deviceEvent( deviceEvent_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT( DeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
DeviceEventInfoEXT( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- DeviceEventInfoEXT& operator=( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ DeviceEventInfoEXT & operator=( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+ DeviceEventInfoEXT & operator=( DeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DeviceEventInfoEXT ) );
return *this;
}
@@ -29293,6 +31332,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkDeviceEventInfoEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceEventInfoEXT*>( this );
@@ -29303,6 +31343,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkDeviceEventInfoEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DeviceEventInfoEXT const& ) const = default;
+#else
bool operator==( DeviceEventInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -29314,6427 +31358,15553 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceEventInfoEXT;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug;
+
};
static_assert( sizeof( DeviceEventInfoEXT ) == sizeof( VkDeviceEventInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DeviceEventInfoEXT>::value, "struct wrapper is not a standard layout!" );
- struct DeviceGeneratedCommandsFeaturesNVX
+ template <>
+ struct CppType<StructureType, StructureType::eDeviceEventInfoEXT>
+ {
+ using Type = DeviceEventInfoEXT;
+ };
+
+ struct DeviceGroupBindSparseInfo
{
- VULKAN_HPP_CONSTEXPR DeviceGeneratedCommandsFeaturesNVX( VULKAN_HPP_NAMESPACE::Bool32 computeBindingPointSupport_ = {} ) VULKAN_HPP_NOEXCEPT
- : computeBindingPointSupport( computeBindingPointSupport_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupBindSparseInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DeviceGroupBindSparseInfo(uint32_t resourceDeviceIndex_ = {}, uint32_t memoryDeviceIndex_ = {}) VULKAN_HPP_NOEXCEPT
+ : resourceDeviceIndex( resourceDeviceIndex_ ), memoryDeviceIndex( memoryDeviceIndex_ )
{}
- VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsFeaturesNVX & operator=( VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsFeaturesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR DeviceGroupBindSparseInfo( DeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DeviceGroupBindSparseInfo( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsFeaturesNVX ) - offsetof( DeviceGeneratedCommandsFeaturesNVX, pNext ) );
- return *this;
+ *this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- DeviceGeneratedCommandsFeaturesNVX( VkDeviceGeneratedCommandsFeaturesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+ DeviceGroupBindSparseInfo & operator=( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo const *>( &rhs );
+ return *this;
}
- DeviceGeneratedCommandsFeaturesNVX& operator=( VkDeviceGeneratedCommandsFeaturesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+ DeviceGroupBindSparseInfo & operator=( DeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsFeaturesNVX const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DeviceGroupBindSparseInfo ) );
return *this;
}
- DeviceGeneratedCommandsFeaturesNVX & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ DeviceGroupBindSparseInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- DeviceGeneratedCommandsFeaturesNVX & setComputeBindingPointSupport( VULKAN_HPP_NAMESPACE::Bool32 computeBindingPointSupport_ ) VULKAN_HPP_NOEXCEPT
+ DeviceGroupBindSparseInfo & setResourceDeviceIndex( uint32_t resourceDeviceIndex_ ) VULKAN_HPP_NOEXCEPT
{
- computeBindingPointSupport = computeBindingPointSupport_;
+ resourceDeviceIndex = resourceDeviceIndex_;
return *this;
}
- operator VkDeviceGeneratedCommandsFeaturesNVX const&() const VULKAN_HPP_NOEXCEPT
+ DeviceGroupBindSparseInfo & setMemoryDeviceIndex( uint32_t memoryDeviceIndex_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkDeviceGeneratedCommandsFeaturesNVX*>( this );
+ memoryDeviceIndex = memoryDeviceIndex_;
+ return *this;
}
- operator VkDeviceGeneratedCommandsFeaturesNVX &() VULKAN_HPP_NOEXCEPT
+
+ operator VkDeviceGroupBindSparseInfo const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkDeviceGroupBindSparseInfo*>( this );
+ }
+
+ operator VkDeviceGroupBindSparseInfo &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkDeviceGeneratedCommandsFeaturesNVX*>( this );
+ return *reinterpret_cast<VkDeviceGroupBindSparseInfo*>( this );
}
- bool operator==( DeviceGeneratedCommandsFeaturesNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DeviceGroupBindSparseInfo const& ) const = default;
+#else
+ bool operator==( DeviceGroupBindSparseInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( computeBindingPointSupport == rhs.computeBindingPointSupport );
+ && ( resourceDeviceIndex == rhs.resourceDeviceIndex )
+ && ( memoryDeviceIndex == rhs.memoryDeviceIndex );
}
- bool operator!=( DeviceGeneratedCommandsFeaturesNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( DeviceGroupBindSparseInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGeneratedCommandsFeaturesNVX;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupBindSparseInfo;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::Bool32 computeBindingPointSupport = {};
+ uint32_t resourceDeviceIndex = {};
+ uint32_t memoryDeviceIndex = {};
+
};
- static_assert( sizeof( DeviceGeneratedCommandsFeaturesNVX ) == sizeof( VkDeviceGeneratedCommandsFeaturesNVX ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<DeviceGeneratedCommandsFeaturesNVX>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( DeviceGroupBindSparseInfo ) == sizeof( VkDeviceGroupBindSparseInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<DeviceGroupBindSparseInfo>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eDeviceGroupBindSparseInfo>
+ {
+ using Type = DeviceGroupBindSparseInfo;
+ };
+ using DeviceGroupBindSparseInfoKHR = DeviceGroupBindSparseInfo;
- struct DeviceGeneratedCommandsLimitsNVX
+ struct DeviceGroupCommandBufferBeginInfo
{
- VULKAN_HPP_CONSTEXPR DeviceGeneratedCommandsLimitsNVX( uint32_t maxIndirectCommandsLayoutTokenCount_ = {},
- uint32_t maxObjectEntryCounts_ = {},
- uint32_t minSequenceCountBufferOffsetAlignment_ = {},
- uint32_t minSequenceIndexBufferOffsetAlignment_ = {},
- uint32_t minCommandsTokenBufferOffsetAlignment_ = {} ) VULKAN_HPP_NOEXCEPT
- : maxIndirectCommandsLayoutTokenCount( maxIndirectCommandsLayoutTokenCount_ )
- , maxObjectEntryCounts( maxObjectEntryCounts_ )
- , minSequenceCountBufferOffsetAlignment( minSequenceCountBufferOffsetAlignment_ )
- , minSequenceIndexBufferOffsetAlignment( minSequenceIndexBufferOffsetAlignment_ )
- , minCommandsTokenBufferOffsetAlignment( minCommandsTokenBufferOffsetAlignment_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupCommandBufferBeginInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo(uint32_t deviceMask_ = {}) VULKAN_HPP_NOEXCEPT
+ : deviceMask( deviceMask_ )
{}
- VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsLimitsNVX & operator=( VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsLimitsNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo( DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DeviceGroupCommandBufferBeginInfo( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsLimitsNVX ) - offsetof( DeviceGeneratedCommandsLimitsNVX, pNext ) );
- return *this;
+ *this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- DeviceGeneratedCommandsLimitsNVX( VkDeviceGeneratedCommandsLimitsNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+ DeviceGroupCommandBufferBeginInfo & operator=( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo const *>( &rhs );
+ return *this;
}
- DeviceGeneratedCommandsLimitsNVX& operator=( VkDeviceGeneratedCommandsLimitsNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+ DeviceGroupCommandBufferBeginInfo & operator=( DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsLimitsNVX const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DeviceGroupCommandBufferBeginInfo ) );
return *this;
}
- DeviceGeneratedCommandsLimitsNVX & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ DeviceGroupCommandBufferBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- DeviceGeneratedCommandsLimitsNVX & setMaxIndirectCommandsLayoutTokenCount( uint32_t maxIndirectCommandsLayoutTokenCount_ ) VULKAN_HPP_NOEXCEPT
+ DeviceGroupCommandBufferBeginInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
+ {
+ deviceMask = deviceMask_;
+ return *this;
+ }
+
+
+ operator VkDeviceGroupCommandBufferBeginInfo const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkDeviceGroupCommandBufferBeginInfo*>( this );
+ }
+
+ operator VkDeviceGroupCommandBufferBeginInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkDeviceGroupCommandBufferBeginInfo*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DeviceGroupCommandBufferBeginInfo const& ) const = default;
+#else
+ bool operator==( DeviceGroupCommandBufferBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( deviceMask == rhs.deviceMask );
+ }
+
+ bool operator!=( DeviceGroupCommandBufferBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupCommandBufferBeginInfo;
+ const void* pNext = {};
+ uint32_t deviceMask = {};
+
+ };
+ static_assert( sizeof( DeviceGroupCommandBufferBeginInfo ) == sizeof( VkDeviceGroupCommandBufferBeginInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<DeviceGroupCommandBufferBeginInfo>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eDeviceGroupCommandBufferBeginInfo>
+ {
+ using Type = DeviceGroupCommandBufferBeginInfo;
+ };
+ using DeviceGroupCommandBufferBeginInfoKHR = DeviceGroupCommandBufferBeginInfo;
+
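
Illustrative sketch (not part of the patch): DeviceGroupCommandBufferBeginInfo is chained into CommandBufferBeginInfo::pNext to limit command-buffer recording to the physical devices selected by deviceMask.

#include <vulkan/vulkan.hpp>

void beginOnDeviceZero( vk::CommandBuffer commandBuffer )
{
    vk::DeviceGroupCommandBufferBeginInfo groupInfo;
    groupInfo.setDeviceMask( 0x1 );  // bit 0 -> physical device index 0 of the device group

    vk::CommandBufferBeginInfo beginInfo;
    beginInfo.setPNext( &groupInfo );
    commandBuffer.begin( beginInfo );
}
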
+ class DisplayKHR
+ {
+ public:
+ using CType = VkDisplayKHR;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDisplayKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayKHR;
+
+ public:
+ VULKAN_HPP_CONSTEXPR DisplayKHR() VULKAN_HPP_NOEXCEPT
+ : m_displayKHR(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR DisplayKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_displayKHR(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT DisplayKHR( VkDisplayKHR displayKHR ) VULKAN_HPP_NOEXCEPT
+ : m_displayKHR( displayKHR )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ DisplayKHR & operator=(VkDisplayKHR displayKHR) VULKAN_HPP_NOEXCEPT
+ {
+ m_displayKHR = displayKHR;
+ return *this;
+ }
+#endif
+
+ DisplayKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
{
- maxIndirectCommandsLayoutTokenCount = maxIndirectCommandsLayoutTokenCount_;
+ m_displayKHR = VK_NULL_HANDLE;
return *this;
}
- DeviceGeneratedCommandsLimitsNVX & setMaxObjectEntryCounts( uint32_t maxObjectEntryCounts_ ) VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DisplayKHR const& ) const = default;
+#else
+ bool operator==( DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_displayKHR == rhs.m_displayKHR;
+ }
+
+ bool operator!=(DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_displayKHR != rhs.m_displayKHR;
+ }
+
+ bool operator<(DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_displayKHR < rhs.m_displayKHR;
+ }
+#endif
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDisplayKHR() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_displayKHR;
+ }
+
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_displayKHR != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_displayKHR == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkDisplayKHR m_displayKHR;
+ };
+ static_assert( sizeof( VULKAN_HPP_NAMESPACE::DisplayKHR ) == sizeof( VkDisplayKHR ), "handle and wrapper have different size!" );
+
+ template <>
+ struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eDisplayKHR>
+ {
+ using type = VULKAN_HPP_NAMESPACE::DisplayKHR;
+ };
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDisplayKHR>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::DisplayKHR;
+ };
+
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayKHR>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::DisplayKHR;
+ };
+
+
+ template <>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DisplayKHR>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+ };
+
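
Illustrative sketch (not part of the patch): the handle wrappers generated here (DisplayKHR, PerformanceConfigurationINTEL, QueryPool, ...) are thin value types around the raw C handle, null-testable via explicit operator bool and convertible back to the underlying handle.

#include <cassert>
#include <vulkan/vulkan.hpp>

void handleBasics()
{
    vk::DisplayKHR display;                                    // default-constructed == VK_NULL_HANDLE
    assert( !display );                                        // explicit operator bool tests for null
    VkDisplayKHR raw = static_cast<VkDisplayKHR>( display );   // round-trip to the raw C handle
    (void) raw;
}
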
+ struct PerformanceConfigurationAcquireInfoINTEL
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceConfigurationAcquireInfoINTEL;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PerformanceConfigurationAcquireInfoINTEL(VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated) VULKAN_HPP_NOEXCEPT
+ : type( type_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR PerformanceConfigurationAcquireInfoINTEL( PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PerformanceConfigurationAcquireInfoINTEL( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PerformanceConfigurationAcquireInfoINTEL & operator=( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
{
- maxObjectEntryCounts = maxObjectEntryCounts_;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const *>( &rhs );
return *this;
}
- DeviceGeneratedCommandsLimitsNVX & setMinSequenceCountBufferOffsetAlignment( uint32_t minSequenceCountBufferOffsetAlignment_ ) VULKAN_HPP_NOEXCEPT
+ PerformanceConfigurationAcquireInfoINTEL & operator=( PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
{
- minSequenceCountBufferOffsetAlignment = minSequenceCountBufferOffsetAlignment_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PerformanceConfigurationAcquireInfoINTEL ) );
return *this;
}
- DeviceGeneratedCommandsLimitsNVX & setMinSequenceIndexBufferOffsetAlignment( uint32_t minSequenceIndexBufferOffsetAlignment_ ) VULKAN_HPP_NOEXCEPT
+ PerformanceConfigurationAcquireInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- minSequenceIndexBufferOffsetAlignment = minSequenceIndexBufferOffsetAlignment_;
+ pNext = pNext_;
return *this;
}
- DeviceGeneratedCommandsLimitsNVX & setMinCommandsTokenBufferOffsetAlignment( uint32_t minCommandsTokenBufferOffsetAlignment_ ) VULKAN_HPP_NOEXCEPT
+ PerformanceConfigurationAcquireInfoINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT
{
- minCommandsTokenBufferOffsetAlignment = minCommandsTokenBufferOffsetAlignment_;
+ type = type_;
return *this;
}
- operator VkDeviceGeneratedCommandsLimitsNVX const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkPerformanceConfigurationAcquireInfoINTEL const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkDeviceGeneratedCommandsLimitsNVX*>( this );
+ return *reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL*>( this );
}
- operator VkDeviceGeneratedCommandsLimitsNVX &() VULKAN_HPP_NOEXCEPT
+ operator VkPerformanceConfigurationAcquireInfoINTEL &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkDeviceGeneratedCommandsLimitsNVX*>( this );
+ return *reinterpret_cast<VkPerformanceConfigurationAcquireInfoINTEL*>( this );
}
- bool operator==( DeviceGeneratedCommandsLimitsNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PerformanceConfigurationAcquireInfoINTEL const& ) const = default;
+#else
+ bool operator==( PerformanceConfigurationAcquireInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( maxIndirectCommandsLayoutTokenCount == rhs.maxIndirectCommandsLayoutTokenCount )
- && ( maxObjectEntryCounts == rhs.maxObjectEntryCounts )
- && ( minSequenceCountBufferOffsetAlignment == rhs.minSequenceCountBufferOffsetAlignment )
- && ( minSequenceIndexBufferOffsetAlignment == rhs.minSequenceIndexBufferOffsetAlignment )
- && ( minCommandsTokenBufferOffsetAlignment == rhs.minCommandsTokenBufferOffsetAlignment );
+ && ( type == rhs.type );
}
- bool operator!=( DeviceGeneratedCommandsLimitsNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PerformanceConfigurationAcquireInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGeneratedCommandsLimitsNVX;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceConfigurationAcquireInfoINTEL;
const void* pNext = {};
- uint32_t maxIndirectCommandsLayoutTokenCount = {};
- uint32_t maxObjectEntryCounts = {};
- uint32_t minSequenceCountBufferOffsetAlignment = {};
- uint32_t minSequenceIndexBufferOffsetAlignment = {};
- uint32_t minCommandsTokenBufferOffsetAlignment = {};
+ VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated;
+
};
- static_assert( sizeof( DeviceGeneratedCommandsLimitsNVX ) == sizeof( VkDeviceGeneratedCommandsLimitsNVX ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<DeviceGeneratedCommandsLimitsNVX>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( PerformanceConfigurationAcquireInfoINTEL ) == sizeof( VkPerformanceConfigurationAcquireInfoINTEL ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PerformanceConfigurationAcquireInfoINTEL>::value, "struct wrapper is not a standard layout!" );
- struct DeviceGroupBindSparseInfo
+ template <>
+ struct CppType<StructureType, StructureType::ePerformanceConfigurationAcquireInfoINTEL>
+ {
+ using Type = PerformanceConfigurationAcquireInfoINTEL;
+ };
+
+ class PerformanceConfigurationINTEL
{
- VULKAN_HPP_CONSTEXPR DeviceGroupBindSparseInfo( uint32_t resourceDeviceIndex_ = {},
- uint32_t memoryDeviceIndex_ = {} ) VULKAN_HPP_NOEXCEPT
- : resourceDeviceIndex( resourceDeviceIndex_ )
- , memoryDeviceIndex( memoryDeviceIndex_ )
+ public:
+ using CType = VkPerformanceConfigurationINTEL;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePerformanceConfigurationINTEL;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+
+ public:
+ VULKAN_HPP_CONSTEXPR PerformanceConfigurationINTEL() VULKAN_HPP_NOEXCEPT
+ : m_performanceConfigurationINTEL(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR PerformanceConfigurationINTEL( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_performanceConfigurationINTEL(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT PerformanceConfigurationINTEL( VkPerformanceConfigurationINTEL performanceConfigurationINTEL ) VULKAN_HPP_NOEXCEPT
+ : m_performanceConfigurationINTEL( performanceConfigurationINTEL )
{}
- VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo & operator=( VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ PerformanceConfigurationINTEL & operator=(VkPerformanceConfigurationINTEL performanceConfigurationINTEL) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo ) - offsetof( DeviceGroupBindSparseInfo, pNext ) );
+ m_performanceConfigurationINTEL = performanceConfigurationINTEL;
return *this;
}
+#endif
- DeviceGroupBindSparseInfo( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ PerformanceConfigurationINTEL & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ {
+ m_performanceConfigurationINTEL = VK_NULL_HANDLE;
+ return *this;
+ }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PerformanceConfigurationINTEL const& ) const = default;
+#else
+ bool operator==( PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_performanceConfigurationINTEL == rhs.m_performanceConfigurationINTEL;
+ }
+
+ bool operator!=(PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_performanceConfigurationINTEL != rhs.m_performanceConfigurationINTEL;
+ }
+
+ bool operator<(PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_performanceConfigurationINTEL < rhs.m_performanceConfigurationINTEL;
+ }
+#endif
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPerformanceConfigurationINTEL() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_performanceConfigurationINTEL;
+ }
+
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_performanceConfigurationINTEL != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_performanceConfigurationINTEL == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkPerformanceConfigurationINTEL m_performanceConfigurationINTEL;
+ };
+ static_assert( sizeof( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL ) == sizeof( VkPerformanceConfigurationINTEL ), "handle and wrapper have different size!" );
+
+ template <>
+ struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::ePerformanceConfigurationINTEL>
+ {
+ using type = VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL;
+ };
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::ePerformanceConfigurationINTEL>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL;
+ };
+
+
+
+ template <>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+ };
+
+ class QueryPool
+ {
+ public:
+ using CType = VkQueryPool;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eQueryPool;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueryPool;
+
+ public:
+ VULKAN_HPP_CONSTEXPR QueryPool() VULKAN_HPP_NOEXCEPT
+ : m_queryPool(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR QueryPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_queryPool(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT QueryPool( VkQueryPool queryPool ) VULKAN_HPP_NOEXCEPT
+ : m_queryPool( queryPool )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ QueryPool & operator=(VkQueryPool queryPool) VULKAN_HPP_NOEXCEPT
+ {
+ m_queryPool = queryPool;
+ return *this;
+ }
+#endif
+
+ QueryPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ {
+ m_queryPool = VK_NULL_HANDLE;
+ return *this;
+ }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( QueryPool const& ) const = default;
+#else
+ bool operator==( QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_queryPool == rhs.m_queryPool;
+ }
+
+ bool operator!=(QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_queryPool != rhs.m_queryPool;
+ }
+
+ bool operator<(QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_queryPool < rhs.m_queryPool;
+ }
+#endif
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueryPool() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_queryPool;
+ }
+
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_queryPool != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_queryPool == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkQueryPool m_queryPool;
+ };
+ static_assert( sizeof( VULKAN_HPP_NAMESPACE::QueryPool ) == sizeof( VkQueryPool ), "handle and wrapper have different size!" );
+
+ template <>
+ struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eQueryPool>
+ {
+ using type = VULKAN_HPP_NAMESPACE::QueryPool;
+ };
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eQueryPool>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::QueryPool;
+ };
+
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueryPool>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::QueryPool;
+ };
+
+
+ template <>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::QueryPool>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+ };
+
+ struct RenderPassBeginInfo
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassBeginInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo(VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {}, VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {}, uint32_t clearValueCount_ = {}, const VULKAN_HPP_NAMESPACE::ClearValue* pClearValues_ = {}) VULKAN_HPP_NOEXCEPT
+ : renderPass( renderPass_ ), framebuffer( framebuffer_ ), renderArea( renderArea_ ), clearValueCount( clearValueCount_ ), pClearValues( pClearValues_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo( RenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ RenderPassBeginInfo( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- DeviceGroupBindSparseInfo& operator=( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ RenderPassBeginInfo( VULKAN_HPP_NAMESPACE::RenderPass renderPass_, VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_, VULKAN_HPP_NAMESPACE::Rect2D renderArea_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ClearValue> const & clearValues_ )
+ : renderPass( renderPass_ ), framebuffer( framebuffer_ ), renderArea( renderArea_ ), clearValueCount( static_cast<uint32_t>( clearValues_.size() ) ), pClearValues( clearValues_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ RenderPassBeginInfo & operator=( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassBeginInfo const *>( &rhs );
return *this;
}
- DeviceGroupBindSparseInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ RenderPassBeginInfo & operator=( RenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( RenderPassBeginInfo ) );
+ return *this;
+ }
+
+ RenderPassBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- DeviceGroupBindSparseInfo & setResourceDeviceIndex( uint32_t resourceDeviceIndex_ ) VULKAN_HPP_NOEXCEPT
+ RenderPassBeginInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
{
- resourceDeviceIndex = resourceDeviceIndex_;
+ renderPass = renderPass_;
return *this;
}
- DeviceGroupBindSparseInfo & setMemoryDeviceIndex( uint32_t memoryDeviceIndex_ ) VULKAN_HPP_NOEXCEPT
+ RenderPassBeginInfo & setFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ ) VULKAN_HPP_NOEXCEPT
{
- memoryDeviceIndex = memoryDeviceIndex_;
+ framebuffer = framebuffer_;
return *this;
}
- operator VkDeviceGroupBindSparseInfo const&() const VULKAN_HPP_NOEXCEPT
+ RenderPassBeginInfo & setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkDeviceGroupBindSparseInfo*>( this );
+ renderArea = renderArea_;
+ return *this;
}
- operator VkDeviceGroupBindSparseInfo &() VULKAN_HPP_NOEXCEPT
+ RenderPassBeginInfo & setClearValueCount( uint32_t clearValueCount_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkDeviceGroupBindSparseInfo*>( this );
+ clearValueCount = clearValueCount_;
+ return *this;
}
- bool operator==( DeviceGroupBindSparseInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ RenderPassBeginInfo & setPClearValues( const VULKAN_HPP_NAMESPACE::ClearValue* pClearValues_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pClearValues = pClearValues_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ RenderPassBeginInfo & setClearValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ClearValue> const & clearValues_ ) VULKAN_HPP_NOEXCEPT
+ {
+ clearValueCount = static_cast<uint32_t>( clearValues_.size() );
+ pClearValues = clearValues_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+ operator VkRenderPassBeginInfo const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkRenderPassBeginInfo*>( this );
+ }
+
+ operator VkRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkRenderPassBeginInfo*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( RenderPassBeginInfo const& ) const = default;
+#else
+ bool operator==( RenderPassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( resourceDeviceIndex == rhs.resourceDeviceIndex )
- && ( memoryDeviceIndex == rhs.memoryDeviceIndex );
+ && ( renderPass == rhs.renderPass )
+ && ( framebuffer == rhs.framebuffer )
+ && ( renderArea == rhs.renderArea )
+ && ( clearValueCount == rhs.clearValueCount )
+ && ( pClearValues == rhs.pClearValues );
}
- bool operator!=( DeviceGroupBindSparseInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( RenderPassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupBindSparseInfo;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassBeginInfo;
const void* pNext = {};
- uint32_t resourceDeviceIndex = {};
- uint32_t memoryDeviceIndex = {};
+ VULKAN_HPP_NAMESPACE::RenderPass renderPass = {};
+ VULKAN_HPP_NAMESPACE::Framebuffer framebuffer = {};
+ VULKAN_HPP_NAMESPACE::Rect2D renderArea = {};
+ uint32_t clearValueCount = {};
+ const VULKAN_HPP_NAMESPACE::ClearValue* pClearValues = {};
+
};
- static_assert( sizeof( DeviceGroupBindSparseInfo ) == sizeof( VkDeviceGroupBindSparseInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<DeviceGroupBindSparseInfo>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( RenderPassBeginInfo ) == sizeof( VkRenderPassBeginInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<RenderPassBeginInfo>::value, "struct wrapper is not a standard layout!" );
- struct DeviceGroupCommandBufferBeginInfo
+ template <>
+ struct CppType<StructureType, StructureType::eRenderPassBeginInfo>
+ {
+ using Type = RenderPassBeginInfo;
+ };
+
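  // Editorial usage sketch (not part of vulkan.hpp or of this diff): how the
  // RenderPassBeginInfo wrapper above is typically filled in from user code that
  // has done `#include <vulkan/vulkan.hpp>`. The renderPass / framebuffer handles
  // and the extent are assumed to come from the caller; the enhanced-mode
  // setClearValues() overload fills clearValueCount and pClearValues in one call.
  inline vk::RenderPassBeginInfo makeRenderPassBeginInfo( vk::RenderPass                        renderPass,
                                                          vk::Framebuffer                       framebuffer,
                                                          vk::Extent2D                          extent,
                                                          std::array<vk::ClearValue, 2> const & clearValues )
  {
    return vk::RenderPassBeginInfo()
      .setRenderPass( renderPass )
      .setFramebuffer( framebuffer )
      .setRenderArea( vk::Rect2D( vk::Offset2D( 0, 0 ), extent ) )
      .setClearValues( clearValues );  // enhanced mode only (VULKAN_HPP_DISABLE_ENHANCED_MODE not defined)
  }
  // Typical caller-side setup for the clear values (mirrors the official Vulkan-Hpp samples):
  //   std::array<vk::ClearValue, 2> clearValues{};
  //   clearValues[0].color        = vk::ClearColorValue( std::array<float, 4>{ { 0.0f, 0.0f, 0.0f, 1.0f } } );
  //   clearValues[1].depthStencil = vk::ClearDepthStencilValue( 1.0f, 0 );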
+ struct SubpassBeginInfo
{
- VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo( uint32_t deviceMask_ = {} ) VULKAN_HPP_NOEXCEPT
- : deviceMask( deviceMask_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassBeginInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SubpassBeginInfo(VULKAN_HPP_NAMESPACE::SubpassContents contents_ = VULKAN_HPP_NAMESPACE::SubpassContents::eInline) VULKAN_HPP_NOEXCEPT
+ : contents( contents_ )
{}
- VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo & operator=( VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR SubpassBeginInfo( SubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ SubpassBeginInfo( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo ) - offsetof( DeviceGroupCommandBufferBeginInfo, pNext ) );
- return *this;
+ *this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- DeviceGroupCommandBufferBeginInfo( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ SubpassBeginInfo & operator=( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassBeginInfo const *>( &rhs );
+ return *this;
}
- DeviceGroupCommandBufferBeginInfo& operator=( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ SubpassBeginInfo & operator=( SubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( SubpassBeginInfo ) );
return *this;
}
- DeviceGroupCommandBufferBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ SubpassBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- DeviceGroupCommandBufferBeginInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
+ SubpassBeginInfo & setContents( VULKAN_HPP_NAMESPACE::SubpassContents contents_ ) VULKAN_HPP_NOEXCEPT
{
- deviceMask = deviceMask_;
+ contents = contents_;
return *this;
}
- operator VkDeviceGroupCommandBufferBeginInfo const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkSubpassBeginInfo const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkDeviceGroupCommandBufferBeginInfo*>( this );
+ return *reinterpret_cast<const VkSubpassBeginInfo*>( this );
}
- operator VkDeviceGroupCommandBufferBeginInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkSubpassBeginInfo &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkDeviceGroupCommandBufferBeginInfo*>( this );
+ return *reinterpret_cast<VkSubpassBeginInfo*>( this );
}
- bool operator==( DeviceGroupCommandBufferBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SubpassBeginInfo const& ) const = default;
+#else
+ bool operator==( SubpassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( deviceMask == rhs.deviceMask );
+ && ( contents == rhs.contents );
}
- bool operator!=( DeviceGroupCommandBufferBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( SubpassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupCommandBufferBeginInfo;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassBeginInfo;
const void* pNext = {};
- uint32_t deviceMask = {};
+ VULKAN_HPP_NAMESPACE::SubpassContents contents = VULKAN_HPP_NAMESPACE::SubpassContents::eInline;
+
};
- static_assert( sizeof( DeviceGroupCommandBufferBeginInfo ) == sizeof( VkDeviceGroupCommandBufferBeginInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<DeviceGroupCommandBufferBeginInfo>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( SubpassBeginInfo ) == sizeof( VkSubpassBeginInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<SubpassBeginInfo>::value, "struct wrapper is not a standard layout!" );
- struct DeviceGroupDeviceCreateInfo
+ template <>
+ struct CppType<StructureType, StructureType::eSubpassBeginInfo>
+ {
+ using Type = SubpassBeginInfo;
+ };
+ using SubpassBeginInfoKHR = SubpassBeginInfo;
+
+ struct ImageBlit
{
- VULKAN_HPP_CONSTEXPR DeviceGroupDeviceCreateInfo( uint32_t physicalDeviceCount_ = {},
- const VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices_ = {} ) VULKAN_HPP_NOEXCEPT
- : physicalDeviceCount( physicalDeviceCount_ )
- , pPhysicalDevices( pPhysicalDevices_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 ImageBlit(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const& srcOffsets_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const& dstOffsets_ = {}) VULKAN_HPP_NOEXCEPT
+ : srcSubresource( srcSubresource_ ), srcOffsets( srcOffsets_ ), dstSubresource( dstSubresource_ ), dstOffsets( dstOffsets_ )
{}
- VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo & operator=( VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageBlit( ImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImageBlit( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ ImageBlit & operator=( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo ) - offsetof( DeviceGroupDeviceCreateInfo, pNext ) );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageBlit const *>( &rhs );
return *this;
}
- DeviceGroupDeviceCreateInfo( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ ImageBlit & operator=( ImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageBlit ) );
+ return *this;
+ }
+
+ ImageBlit & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcSubresource = srcSubresource_;
+ return *this;
+ }
+
+ ImageBlit & setSrcOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const & srcOffsets_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcOffsets = srcOffsets_;
+ return *this;
+ }
+
+ ImageBlit & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstSubresource = dstSubresource_;
+ return *this;
+ }
+
+ ImageBlit & setDstOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const & dstOffsets_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstOffsets = dstOffsets_;
+ return *this;
+ }
+
+
+ operator VkImageBlit const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkImageBlit*>( this );
+ }
+
+ operator VkImageBlit &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkImageBlit*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ImageBlit const& ) const = default;
+#else
+ bool operator==( ImageBlit const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( srcSubresource == rhs.srcSubresource )
+ && ( srcOffsets == rhs.srcOffsets )
+ && ( dstSubresource == rhs.dstSubresource )
+ && ( dstOffsets == rhs.dstOffsets );
+ }
+
+ bool operator!=( ImageBlit const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> srcOffsets = {};
+ VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> dstOffsets = {};
+
+ };
+ static_assert( sizeof( ImageBlit ) == sizeof( VkImageBlit ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ImageBlit>::value, "struct wrapper is not a standard layout!" );
+
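  // Editorial usage sketch (not part of vulkan.hpp or of this diff): an ImageBlit for a
  // full-size, mip-to-mip color blit. The widths, heights and mip indices are assumed to
  // come from the caller; each two-element offset array gives the opposite corners of the
  // source and destination regions.
  inline vk::ImageBlit makeFullBlit( int32_t srcWidth, int32_t srcHeight, uint32_t srcMip,
                                     int32_t dstWidth, int32_t dstHeight, uint32_t dstMip )
  {
    std::array<vk::Offset3D, 2> srcOffsets = { vk::Offset3D( 0, 0, 0 ), vk::Offset3D( srcWidth, srcHeight, 1 ) };
    std::array<vk::Offset3D, 2> dstOffsets = { vk::Offset3D( 0, 0, 0 ), vk::Offset3D( dstWidth, dstHeight, 1 ) };
    return vk::ImageBlit()
      .setSrcSubresource( vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, srcMip, 0, 1 ) )
      .setSrcOffsets( srcOffsets )
      .setDstSubresource( vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, dstMip, 0, 1 ) )
      .setDstOffsets( dstOffsets );
  }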
+ struct ImageSubresourceRange
+ {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ImageSubresourceRange(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, uint32_t baseMipLevel_ = {}, uint32_t levelCount_ = {}, uint32_t baseArrayLayer_ = {}, uint32_t layerCount_ = {}) VULKAN_HPP_NOEXCEPT
+ : aspectMask( aspectMask_ ), baseMipLevel( baseMipLevel_ ), levelCount( levelCount_ ), baseArrayLayer( baseArrayLayer_ ), layerCount( layerCount_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR ImageSubresourceRange( ImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImageSubresourceRange( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- DeviceGroupDeviceCreateInfo& operator=( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ ImageSubresourceRange & operator=( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresourceRange const *>( &rhs );
return *this;
}
- DeviceGroupDeviceCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ ImageSubresourceRange & operator=( ImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageSubresourceRange ) );
return *this;
}
- DeviceGroupDeviceCreateInfo & setPhysicalDeviceCount( uint32_t physicalDeviceCount_ ) VULKAN_HPP_NOEXCEPT
+ ImageSubresourceRange & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
{
- physicalDeviceCount = physicalDeviceCount_;
+ aspectMask = aspectMask_;
return *this;
}
- DeviceGroupDeviceCreateInfo & setPPhysicalDevices( const VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices_ ) VULKAN_HPP_NOEXCEPT
+ ImageSubresourceRange & setBaseMipLevel( uint32_t baseMipLevel_ ) VULKAN_HPP_NOEXCEPT
{
- pPhysicalDevices = pPhysicalDevices_;
+ baseMipLevel = baseMipLevel_;
return *this;
}
- operator VkDeviceGroupDeviceCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+ ImageSubresourceRange & setLevelCount( uint32_t levelCount_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkDeviceGroupDeviceCreateInfo*>( this );
+ levelCount = levelCount_;
+ return *this;
}
- operator VkDeviceGroupDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT
+ ImageSubresourceRange & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkDeviceGroupDeviceCreateInfo*>( this );
+ baseArrayLayer = baseArrayLayer_;
+ return *this;
}
- bool operator==( DeviceGroupDeviceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ ImageSubresourceRange & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
{
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( physicalDeviceCount == rhs.physicalDeviceCount )
- && ( pPhysicalDevices == rhs.pPhysicalDevices );
+ layerCount = layerCount_;
+ return *this;
}
- bool operator!=( DeviceGroupDeviceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+ operator VkImageSubresourceRange const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkImageSubresourceRange*>( this );
+ }
+
+ operator VkImageSubresourceRange &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkImageSubresourceRange*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ImageSubresourceRange const& ) const = default;
+#else
+ bool operator==( ImageSubresourceRange const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( aspectMask == rhs.aspectMask )
+ && ( baseMipLevel == rhs.baseMipLevel )
+ && ( levelCount == rhs.levelCount )
+ && ( baseArrayLayer == rhs.baseArrayLayer )
+ && ( layerCount == rhs.layerCount );
+ }
+
+ bool operator!=( ImageSubresourceRange const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupDeviceCreateInfo;
- const void* pNext = {};
- uint32_t physicalDeviceCount = {};
- const VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices = {};
+ VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
+ uint32_t baseMipLevel = {};
+ uint32_t levelCount = {};
+ uint32_t baseArrayLayer = {};
+ uint32_t layerCount = {};
+
};
- static_assert( sizeof( DeviceGroupDeviceCreateInfo ) == sizeof( VkDeviceGroupDeviceCreateInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<DeviceGroupDeviceCreateInfo>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( ImageSubresourceRange ) == sizeof( VkImageSubresourceRange ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ImageSubresourceRange>::value, "struct wrapper is not a standard layout!" );
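  // Editorial usage sketch (not part of vulkan.hpp or of this diff): a subresource range
  // covering every mip level and array layer of a color image, as used by barriers and
  // image-view creation. VK_REMAINING_MIP_LEVELS / VK_REMAINING_ARRAY_LAYERS come from
  // vulkan_core.h; the constructor arguments follow the member order above.
  inline vk::ImageSubresourceRange fullColorRange()
  {
    return vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor,
                                      0, VK_REMAINING_MIP_LEVELS,     // baseMipLevel, levelCount
                                      0, VK_REMAINING_ARRAY_LAYERS ); // baseArrayLayer, layerCount
  }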
- struct DeviceGroupPresentCapabilitiesKHR
+ struct ImageCopy
{
- DeviceGroupPresentCapabilitiesKHR( std::array<uint32_t,VK_MAX_DEVICE_GROUP_SIZE> const& presentMask_ = {},
- VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {} ) VULKAN_HPP_NOEXCEPT
- : presentMask{}
- , modes( modes_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ImageCopy(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}) VULKAN_HPP_NOEXCEPT
+ : srcSubresource( srcSubresource_ ), srcOffset( srcOffset_ ), dstSubresource( dstSubresource_ ), dstOffset( dstOffset_ ), extent( extent_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR ImageCopy( ImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImageCopy( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint32_t,VK_MAX_DEVICE_GROUP_SIZE,VK_MAX_DEVICE_GROUP_SIZE>::copy( presentMask, presentMask_ );
+ *this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR & operator=( VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ ImageCopy & operator=( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR ) - offsetof( DeviceGroupPresentCapabilitiesKHR, pNext ) );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageCopy const *>( &rhs );
return *this;
}
- DeviceGroupPresentCapabilitiesKHR( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ ImageCopy & operator=( ImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageCopy ) );
+ return *this;
+ }
+
+ ImageCopy & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcSubresource = srcSubresource_;
+ return *this;
+ }
+
+ ImageCopy & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcOffset = srcOffset_;
+ return *this;
+ }
+
+ ImageCopy & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstSubresource = dstSubresource_;
+ return *this;
+ }
+
+ ImageCopy & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstOffset = dstOffset_;
+ return *this;
+ }
+
+ ImageCopy & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extent = extent_;
+ return *this;
+ }
+
+
+ operator VkImageCopy const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkImageCopy*>( this );
+ }
+
+ operator VkImageCopy &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkImageCopy*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ImageCopy const& ) const = default;
+#else
+ bool operator==( ImageCopy const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( srcSubresource == rhs.srcSubresource )
+ && ( srcOffset == rhs.srcOffset )
+ && ( dstSubresource == rhs.dstSubresource )
+ && ( dstOffset == rhs.dstOffset )
+ && ( extent == rhs.extent );
+ }
+
+ bool operator!=( ImageCopy const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
+ VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {};
+ VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
+ VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {};
+ VULKAN_HPP_NAMESPACE::Extent3D extent = {};
+
+ };
+ static_assert( sizeof( ImageCopy ) == sizeof( VkImageCopy ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ImageCopy>::value, "struct wrapper is not a standard layout!" );
+
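  // Editorial usage sketch (not part of vulkan.hpp or of this diff): unlike ImageBlit,
  // ImageCopy takes a single extent and performs no scaling or filtering. The extent and
  // mip level are assumed to come from the caller; the offsets keep their (0,0,0) defaults.
  inline vk::ImageCopy makeWholeMipCopy( vk::Extent3D extent, uint32_t mipLevel )
  {
    return vk::ImageCopy()
      .setSrcSubresource( vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, mipLevel, 0, 1 ) )
      .setDstSubresource( vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, mipLevel, 0, 1 ) )
      .setExtent( extent );
  }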
+ struct SubpassEndInfo
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassEndInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SubpassEndInfo() VULKAN_HPP_NOEXCEPT
+
+ {}
+
+ VULKAN_HPP_CONSTEXPR SubpassEndInfo( SubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ SubpassEndInfo( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- DeviceGroupPresentCapabilitiesKHR& operator=( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ SubpassEndInfo & operator=( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassEndInfo const *>( &rhs );
return *this;
}
- operator VkDeviceGroupPresentCapabilitiesKHR const&() const VULKAN_HPP_NOEXCEPT
+ SubpassEndInfo & operator=( SubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkDeviceGroupPresentCapabilitiesKHR*>( this );
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( SubpassEndInfo ) );
+ return *this;
}
- operator VkDeviceGroupPresentCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
+ SubpassEndInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR*>( this );
+ pNext = pNext_;
+ return *this;
}
- bool operator==( DeviceGroupPresentCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+ operator VkSubpassEndInfo const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkSubpassEndInfo*>( this );
+ }
+
+ operator VkSubpassEndInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkSubpassEndInfo*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SubpassEndInfo const& ) const = default;
+#else
+ bool operator==( SubpassEndInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( memcmp( presentMask, rhs.presentMask, VK_MAX_DEVICE_GROUP_SIZE * sizeof( uint32_t ) ) == 0 )
- && ( modes == rhs.modes );
+ && ( pNext == rhs.pNext );
}
- bool operator!=( DeviceGroupPresentCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( SubpassEndInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupPresentCapabilitiesKHR;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassEndInfo;
const void* pNext = {};
- uint32_t presentMask[VK_MAX_DEVICE_GROUP_SIZE] = {};
- VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes = {};
+
};
- static_assert( sizeof( DeviceGroupPresentCapabilitiesKHR ) == sizeof( VkDeviceGroupPresentCapabilitiesKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<DeviceGroupPresentCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( SubpassEndInfo ) == sizeof( VkSubpassEndInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<SubpassEndInfo>::value, "struct wrapper is not a standard layout!" );
- struct DeviceGroupPresentInfoKHR
+ template <>
+ struct CppType<StructureType, StructureType::eSubpassEndInfo>
{
- VULKAN_HPP_CONSTEXPR DeviceGroupPresentInfoKHR( uint32_t swapchainCount_ = {},
- const uint32_t* pDeviceMasks_ = {},
- VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal ) VULKAN_HPP_NOEXCEPT
- : swapchainCount( swapchainCount_ )
- , pDeviceMasks( pDeviceMasks_ )
- , mode( mode_ )
+ using Type = SubpassEndInfo;
+ };
+ using SubpassEndInfoKHR = SubpassEndInfo;
+
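  // Editorial usage sketch (not part of vulkan.hpp or of this diff): SubpassBeginInfo and
  // SubpassEndInfo are the pNext-extensible companions to the Vulkan 1.2 "2" render-pass
  // commands. Assumes `cmd` is a recording vk::CommandBuffer and `beginInfo` is a filled
  // vk::RenderPassBeginInfo; beginRenderPass2()/endRenderPass2() are declared elsewhere in
  // this header.
  inline void recordRenderPass2( vk::CommandBuffer cmd, vk::RenderPassBeginInfo const & beginInfo )
  {
    cmd.beginRenderPass2( beginInfo, vk::SubpassBeginInfo( vk::SubpassContents::eInline ) );
    // ... record draw calls for the first (and only) subpass here ...
    cmd.endRenderPass2( vk::SubpassEndInfo() );
  }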
+ class IndirectCommandsLayoutNV
+ {
+ public:
+ using CType = VkIndirectCommandsLayoutNV;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+
+ public:
+ VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNV() VULKAN_HPP_NOEXCEPT
+ : m_indirectCommandsLayoutNV(VK_NULL_HANDLE)
{}
- VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR & operator=( VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_indirectCommandsLayoutNV(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT IndirectCommandsLayoutNV( VkIndirectCommandsLayoutNV indirectCommandsLayoutNV ) VULKAN_HPP_NOEXCEPT
+ : m_indirectCommandsLayoutNV( indirectCommandsLayoutNV )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ IndirectCommandsLayoutNV & operator=(VkIndirectCommandsLayoutNV indirectCommandsLayoutNV) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR ) - offsetof( DeviceGroupPresentInfoKHR, pNext ) );
+ m_indirectCommandsLayoutNV = indirectCommandsLayoutNV;
return *this;
}
+#endif
- DeviceGroupPresentInfoKHR( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ IndirectCommandsLayoutNV & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ {
+ m_indirectCommandsLayoutNV = VK_NULL_HANDLE;
+ return *this;
+ }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( IndirectCommandsLayoutNV const& ) const = default;
+#else
+ bool operator==( IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_indirectCommandsLayoutNV == rhs.m_indirectCommandsLayoutNV;
+ }
+
+ bool operator!=(IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_indirectCommandsLayoutNV != rhs.m_indirectCommandsLayoutNV;
+ }
+
+ bool operator<(IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_indirectCommandsLayoutNV < rhs.m_indirectCommandsLayoutNV;
+ }
+#endif
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkIndirectCommandsLayoutNV() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_indirectCommandsLayoutNV;
+ }
+
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_indirectCommandsLayoutNV != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_indirectCommandsLayoutNV == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkIndirectCommandsLayoutNV m_indirectCommandsLayoutNV;
+ };
+ static_assert( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV ) == sizeof( VkIndirectCommandsLayoutNV ), "handle and wrapper have different size!" );
+
+ template <>
+ struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eIndirectCommandsLayoutNV>
+ {
+ using type = VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV;
+ };
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutNV>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV;
+ };
+
+
+
+ template <>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+ };
+
+ struct IndirectCommandsStreamNV
+ {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR IndirectCommandsStreamNV(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}) VULKAN_HPP_NOEXCEPT
+ : buffer( buffer_ ), offset( offset_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR IndirectCommandsStreamNV( IndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ IndirectCommandsStreamNV( VkIndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- DeviceGroupPresentInfoKHR& operator=( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ IndirectCommandsStreamNV & operator=( VkIndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV const *>( &rhs );
return *this;
}
- DeviceGroupPresentInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ IndirectCommandsStreamNV & operator=( IndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( IndirectCommandsStreamNV ) );
+ return *this;
+ }
+
+ IndirectCommandsStreamNV & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+ {
+ buffer = buffer_;
+ return *this;
+ }
+
+ IndirectCommandsStreamNV & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+ {
+ offset = offset_;
+ return *this;
+ }
+
+
+ operator VkIndirectCommandsStreamNV const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkIndirectCommandsStreamNV*>( this );
+ }
+
+ operator VkIndirectCommandsStreamNV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkIndirectCommandsStreamNV*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( IndirectCommandsStreamNV const& ) const = default;
+#else
+ bool operator==( IndirectCommandsStreamNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( buffer == rhs.buffer )
+ && ( offset == rhs.offset );
+ }
+
+ bool operator!=( IndirectCommandsStreamNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+
+ };
+ static_assert( sizeof( IndirectCommandsStreamNV ) == sizeof( VkIndirectCommandsStreamNV ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<IndirectCommandsStreamNV>::value, "struct wrapper is not a standard layout!" );
+
+ struct GeneratedCommandsInfoNV
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoNV(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ = {}, uint32_t streamCount_ = {}, const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV* pStreams_ = {}, uint32_t sequencesCount_ = {}, VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ = {}, VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = {}, VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {}) VULKAN_HPP_NOEXCEPT
+ : pipelineBindPoint( pipelineBindPoint_ ), pipeline( pipeline_ ), indirectCommandsLayout( indirectCommandsLayout_ ), streamCount( streamCount_ ), pStreams( pStreams_ ), sequencesCount( sequencesCount_ ), preprocessBuffer( preprocessBuffer_ ), preprocessOffset( preprocessOffset_ ), preprocessSize( preprocessSize_ ), sequencesCountBuffer( sequencesCountBuffer_ ), sequencesCountOffset( sequencesCountOffset_ ), sequencesIndexBuffer( sequencesIndexBuffer_ ), sequencesIndexOffset( sequencesIndexOffset_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoNV( GeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ GeneratedCommandsInfoNV( VkGeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ GeneratedCommandsInfoNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, VULKAN_HPP_NAMESPACE::Pipeline pipeline_, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV> const & streams_, uint32_t sequencesCount_ = {}, VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ = {}, VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = {}, VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {} )
+ : pipelineBindPoint( pipelineBindPoint_ ), pipeline( pipeline_ ), indirectCommandsLayout( indirectCommandsLayout_ ), streamCount( static_cast<uint32_t>( streams_.size() ) ), pStreams( streams_.data() ), sequencesCount( sequencesCount_ ), preprocessBuffer( preprocessBuffer_ ), preprocessOffset( preprocessOffset_ ), preprocessSize( preprocessSize_ ), sequencesCountBuffer( sequencesCountBuffer_ ), sequencesCountOffset( sequencesCountOffset_ ), sequencesIndexBuffer( sequencesIndexBuffer_ ), sequencesIndexOffset( sequencesIndexOffset_ )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ GeneratedCommandsInfoNV & operator=( VkGeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV const *>( &rhs );
+ return *this;
+ }
+
+ GeneratedCommandsInfoNV & operator=( GeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( GeneratedCommandsInfoNV ) );
+ return *this;
+ }
+
+ GeneratedCommandsInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- DeviceGroupPresentInfoKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
+ GeneratedCommandsInfoNV & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
{
- swapchainCount = swapchainCount_;
+ pipelineBindPoint = pipelineBindPoint_;
return *this;
}
- DeviceGroupPresentInfoKHR & setPDeviceMasks( const uint32_t* pDeviceMasks_ ) VULKAN_HPP_NOEXCEPT
+ GeneratedCommandsInfoNV & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
{
- pDeviceMasks = pDeviceMasks_;
+ pipeline = pipeline_;
return *this;
}
- DeviceGroupPresentInfoKHR & setMode( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ ) VULKAN_HPP_NOEXCEPT
+ GeneratedCommandsInfoNV & setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT
{
- mode = mode_;
+ indirectCommandsLayout = indirectCommandsLayout_;
return *this;
}
- operator VkDeviceGroupPresentInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+ GeneratedCommandsInfoNV & setStreamCount( uint32_t streamCount_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkDeviceGroupPresentInfoKHR*>( this );
+ streamCount = streamCount_;
+ return *this;
}
- operator VkDeviceGroupPresentInfoKHR &() VULKAN_HPP_NOEXCEPT
+ GeneratedCommandsInfoNV & setPStreams( const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV* pStreams_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkDeviceGroupPresentInfoKHR*>( this );
+ pStreams = pStreams_;
+ return *this;
}
- bool operator==( DeviceGroupPresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ GeneratedCommandsInfoNV & setStreams( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV> const & streams_ ) VULKAN_HPP_NOEXCEPT
+ {
+ streamCount = static_cast<uint32_t>( streams_.size() );
+ pStreams = streams_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+ GeneratedCommandsInfoNV & setSequencesCount( uint32_t sequencesCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ sequencesCount = sequencesCount_;
+ return *this;
+ }
+
+ GeneratedCommandsInfoNV & setPreprocessBuffer( VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ ) VULKAN_HPP_NOEXCEPT
+ {
+ preprocessBuffer = preprocessBuffer_;
+ return *this;
+ }
+
+ GeneratedCommandsInfoNV & setPreprocessOffset( VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ ) VULKAN_HPP_NOEXCEPT
+ {
+ preprocessOffset = preprocessOffset_;
+ return *this;
+ }
+
+ GeneratedCommandsInfoNV & setPreprocessSize( VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ ) VULKAN_HPP_NOEXCEPT
+ {
+ preprocessSize = preprocessSize_;
+ return *this;
+ }
+
+ GeneratedCommandsInfoNV & setSequencesCountBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ ) VULKAN_HPP_NOEXCEPT
+ {
+ sequencesCountBuffer = sequencesCountBuffer_;
+ return *this;
+ }
+
+ GeneratedCommandsInfoNV & setSequencesCountOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ ) VULKAN_HPP_NOEXCEPT
+ {
+ sequencesCountOffset = sequencesCountOffset_;
+ return *this;
+ }
+
+ GeneratedCommandsInfoNV & setSequencesIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ ) VULKAN_HPP_NOEXCEPT
+ {
+ sequencesIndexBuffer = sequencesIndexBuffer_;
+ return *this;
+ }
+
+ GeneratedCommandsInfoNV & setSequencesIndexOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ ) VULKAN_HPP_NOEXCEPT
+ {
+ sequencesIndexOffset = sequencesIndexOffset_;
+ return *this;
+ }
+
+
+ operator VkGeneratedCommandsInfoNV const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkGeneratedCommandsInfoNV*>( this );
+ }
+
+ operator VkGeneratedCommandsInfoNV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkGeneratedCommandsInfoNV*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( GeneratedCommandsInfoNV const& ) const = default;
+#else
+ bool operator==( GeneratedCommandsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( swapchainCount == rhs.swapchainCount )
- && ( pDeviceMasks == rhs.pDeviceMasks )
- && ( mode == rhs.mode );
+ && ( pipelineBindPoint == rhs.pipelineBindPoint )
+ && ( pipeline == rhs.pipeline )
+ && ( indirectCommandsLayout == rhs.indirectCommandsLayout )
+ && ( streamCount == rhs.streamCount )
+ && ( pStreams == rhs.pStreams )
+ && ( sequencesCount == rhs.sequencesCount )
+ && ( preprocessBuffer == rhs.preprocessBuffer )
+ && ( preprocessOffset == rhs.preprocessOffset )
+ && ( preprocessSize == rhs.preprocessSize )
+ && ( sequencesCountBuffer == rhs.sequencesCountBuffer )
+ && ( sequencesCountOffset == rhs.sequencesCountOffset )
+ && ( sequencesIndexBuffer == rhs.sequencesIndexBuffer )
+ && ( sequencesIndexOffset == rhs.sequencesIndexOffset );
}
- bool operator!=( DeviceGroupPresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( GeneratedCommandsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupPresentInfoKHR;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsInfoNV;
const void* pNext = {};
- uint32_t swapchainCount = {};
- const uint32_t* pDeviceMasks = {};
- VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal;
+ VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
+ VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
+ VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout = {};
+ uint32_t streamCount = {};
+ const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV* pStreams = {};
+ uint32_t sequencesCount = {};
+ VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize = {};
+ VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset = {};
+ VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset = {};
+
};
- static_assert( sizeof( DeviceGroupPresentInfoKHR ) == sizeof( VkDeviceGroupPresentInfoKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<DeviceGroupPresentInfoKHR>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( GeneratedCommandsInfoNV ) == sizeof( VkGeneratedCommandsInfoNV ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<GeneratedCommandsInfoNV>::value, "struct wrapper is not a standard layout!" );
- struct DeviceGroupRenderPassBeginInfo
+ template <>
+ struct CppType<StructureType, StructureType::eGeneratedCommandsInfoNV>
{
- VULKAN_HPP_CONSTEXPR DeviceGroupRenderPassBeginInfo( uint32_t deviceMask_ = {},
- uint32_t deviceRenderAreaCount_ = {},
- const VULKAN_HPP_NAMESPACE::Rect2D* pDeviceRenderAreas_ = {} ) VULKAN_HPP_NOEXCEPT
- : deviceMask( deviceMask_ )
- , deviceRenderAreaCount( deviceRenderAreaCount_ )
- , pDeviceRenderAreas( pDeviceRenderAreas_ )
+ using Type = GeneratedCommandsInfoNV;
+ };
+
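  // Editorial usage sketch (not part of vulkan.hpp or of this diff): filling in
  // GeneratedCommandsInfoNV for VK_NV_device_generated_commands. All handles, the stream,
  // the sequence count and the preprocess-buffer size are assumed to come from the caller;
  // the enhanced-mode setStreams() overload fills streamCount / pStreams from a single stream.
  inline vk::GeneratedCommandsInfoNV makeGeneratedCommandsInfo( vk::Pipeline                         pipeline,
                                                                vk::IndirectCommandsLayoutNV         layout,
                                                                vk::IndirectCommandsStreamNV const & stream,
                                                                uint32_t                             sequencesCount,
                                                                vk::Buffer                           preprocessBuffer,
                                                                vk::DeviceSize                       preprocessSize )
  {
    return vk::GeneratedCommandsInfoNV()
      .setPipelineBindPoint( vk::PipelineBindPoint::eGraphics )
      .setPipeline( pipeline )
      .setIndirectCommandsLayout( layout )
      .setStreams( stream )
      .setSequencesCount( sequencesCount )
      .setPreprocessBuffer( preprocessBuffer )
      .setPreprocessSize( preprocessSize );
  }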
+ struct MemoryBarrier
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryBarrier;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR MemoryBarrier(VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}) VULKAN_HPP_NOEXCEPT
+ : srcAccessMask( srcAccessMask_ ), dstAccessMask( dstAccessMask_ )
{}
- VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo & operator=( VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo ) - offsetof( DeviceGroupRenderPassBeginInfo, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR MemoryBarrier( MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- DeviceGroupRenderPassBeginInfo( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ MemoryBarrier( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- DeviceGroupRenderPassBeginInfo& operator=( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ MemoryBarrier & operator=( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryBarrier const *>( &rhs );
return *this;
}
- DeviceGroupRenderPassBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ MemoryBarrier & operator=( MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( MemoryBarrier ) );
return *this;
}
- DeviceGroupRenderPassBeginInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
+ MemoryBarrier & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- deviceMask = deviceMask_;
+ pNext = pNext_;
return *this;
}
- DeviceGroupRenderPassBeginInfo & setDeviceRenderAreaCount( uint32_t deviceRenderAreaCount_ ) VULKAN_HPP_NOEXCEPT
+ MemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
{
- deviceRenderAreaCount = deviceRenderAreaCount_;
+ srcAccessMask = srcAccessMask_;
return *this;
}
- DeviceGroupRenderPassBeginInfo & setPDeviceRenderAreas( const VULKAN_HPP_NAMESPACE::Rect2D* pDeviceRenderAreas_ ) VULKAN_HPP_NOEXCEPT
+ MemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
{
- pDeviceRenderAreas = pDeviceRenderAreas_;
+ dstAccessMask = dstAccessMask_;
return *this;
}
- operator VkDeviceGroupRenderPassBeginInfo const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkMemoryBarrier const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkDeviceGroupRenderPassBeginInfo*>( this );
+ return *reinterpret_cast<const VkMemoryBarrier*>( this );
}
- operator VkDeviceGroupRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkMemoryBarrier &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkDeviceGroupRenderPassBeginInfo*>( this );
+ return *reinterpret_cast<VkMemoryBarrier*>( this );
}
- bool operator==( DeviceGroupRenderPassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( MemoryBarrier const& ) const = default;
+#else
+ bool operator==( MemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( deviceMask == rhs.deviceMask )
- && ( deviceRenderAreaCount == rhs.deviceRenderAreaCount )
- && ( pDeviceRenderAreas == rhs.pDeviceRenderAreas );
+ && ( srcAccessMask == rhs.srcAccessMask )
+ && ( dstAccessMask == rhs.dstAccessMask );
}
- bool operator!=( DeviceGroupRenderPassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( MemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupRenderPassBeginInfo;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryBarrier;
const void* pNext = {};
- uint32_t deviceMask = {};
- uint32_t deviceRenderAreaCount = {};
- const VULKAN_HPP_NAMESPACE::Rect2D* pDeviceRenderAreas = {};
+ VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
+ VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
+
};
- static_assert( sizeof( DeviceGroupRenderPassBeginInfo ) == sizeof( VkDeviceGroupRenderPassBeginInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<DeviceGroupRenderPassBeginInfo>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( MemoryBarrier ) == sizeof( VkMemoryBarrier ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<MemoryBarrier>::value, "struct wrapper is not a standard layout!" );
- struct DeviceGroupSubmitInfo
+ template <>
+ struct CppType<StructureType, StructureType::eMemoryBarrier>
+ {
+ using Type = MemoryBarrier;
+ };
+
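  // Editorial usage sketch (not part of vulkan.hpp or of this diff): a global MemoryBarrier
  // making transfer writes visible to subsequent fragment-shader reads. Assumes `cmd` is a
  // recording vk::CommandBuffer; pipelineBarrier() is declared elsewhere in this header.
  inline void transferToShaderReadBarrier( vk::CommandBuffer cmd )
  {
    vk::MemoryBarrier barrier( vk::AccessFlagBits::eTransferWrite, vk::AccessFlagBits::eShaderRead );
    cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTransfer,
                         vk::PipelineStageFlagBits::eFragmentShader,
                         {},          // no dependency flags
                         barrier,     // global memory barriers
                         nullptr,     // no buffer memory barriers
                         nullptr );   // no image memory barriers
  }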
+ struct ImageMemoryBarrier
{
- VULKAN_HPP_CONSTEXPR DeviceGroupSubmitInfo( uint32_t waitSemaphoreCount_ = {},
- const uint32_t* pWaitSemaphoreDeviceIndices_ = {},
- uint32_t commandBufferCount_ = {},
- const uint32_t* pCommandBufferDeviceMasks_ = {},
- uint32_t signalSemaphoreCount_ = {},
- const uint32_t* pSignalSemaphoreDeviceIndices_ = {} ) VULKAN_HPP_NOEXCEPT
- : waitSemaphoreCount( waitSemaphoreCount_ )
- , pWaitSemaphoreDeviceIndices( pWaitSemaphoreDeviceIndices_ )
- , commandBufferCount( commandBufferCount_ )
- , pCommandBufferDeviceMasks( pCommandBufferDeviceMasks_ )
- , signalSemaphoreCount( signalSemaphoreCount_ )
- , pSignalSemaphoreDeviceIndices( pSignalSemaphoreDeviceIndices_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryBarrier;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ImageMemoryBarrier(VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t srcQueueFamilyIndex_ = {}, uint32_t dstQueueFamilyIndex_ = {}, VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {}) VULKAN_HPP_NOEXCEPT
+ : srcAccessMask( srcAccessMask_ ), dstAccessMask( dstAccessMask_ ), oldLayout( oldLayout_ ), newLayout( newLayout_ ), srcQueueFamilyIndex( srcQueueFamilyIndex_ ), dstQueueFamilyIndex( dstQueueFamilyIndex_ ), image( image_ ), subresourceRange( subresourceRange_ )
{}
- VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo & operator=( VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ImageMemoryBarrier( ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImageMemoryBarrier( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo ) - offsetof( DeviceGroupSubmitInfo, pNext ) );
- return *this;
+ *this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- DeviceGroupSubmitInfo( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ ImageMemoryBarrier & operator=( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier const *>( &rhs );
+ return *this;
}
- DeviceGroupSubmitInfo& operator=( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ ImageMemoryBarrier & operator=( ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageMemoryBarrier ) );
return *this;
}
- DeviceGroupSubmitInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ ImageMemoryBarrier & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- DeviceGroupSubmitInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
+ ImageMemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
{
- waitSemaphoreCount = waitSemaphoreCount_;
+ srcAccessMask = srcAccessMask_;
return *this;
}
- DeviceGroupSubmitInfo & setPWaitSemaphoreDeviceIndices( const uint32_t* pWaitSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
+ ImageMemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
{
- pWaitSemaphoreDeviceIndices = pWaitSemaphoreDeviceIndices_;
+ dstAccessMask = dstAccessMask_;
return *this;
}
- DeviceGroupSubmitInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT
+ ImageMemoryBarrier & setOldLayout( VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT
{
- commandBufferCount = commandBufferCount_;
+ oldLayout = oldLayout_;
return *this;
}
- DeviceGroupSubmitInfo & setPCommandBufferDeviceMasks( const uint32_t* pCommandBufferDeviceMasks_ ) VULKAN_HPP_NOEXCEPT
+ ImageMemoryBarrier & setNewLayout( VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT
{
- pCommandBufferDeviceMasks = pCommandBufferDeviceMasks_;
+ newLayout = newLayout_;
return *this;
}
- DeviceGroupSubmitInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
+ ImageMemoryBarrier & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
{
- signalSemaphoreCount = signalSemaphoreCount_;
+ srcQueueFamilyIndex = srcQueueFamilyIndex_;
return *this;
}
- DeviceGroupSubmitInfo & setPSignalSemaphoreDeviceIndices( const uint32_t* pSignalSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
+ ImageMemoryBarrier & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
{
- pSignalSemaphoreDeviceIndices = pSignalSemaphoreDeviceIndices_;
+ dstQueueFamilyIndex = dstQueueFamilyIndex_;
return *this;
}
- operator VkDeviceGroupSubmitInfo const&() const VULKAN_HPP_NOEXCEPT
+ ImageMemoryBarrier & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkDeviceGroupSubmitInfo*>( this );
+ image = image_;
+ return *this;
}
- operator VkDeviceGroupSubmitInfo &() VULKAN_HPP_NOEXCEPT
+ ImageMemoryBarrier & setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkDeviceGroupSubmitInfo*>( this );
+ subresourceRange = subresourceRange_;
+ return *this;
}
- bool operator==( DeviceGroupSubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+ operator VkImageMemoryBarrier const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkImageMemoryBarrier*>( this );
+ }
+
+ operator VkImageMemoryBarrier &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkImageMemoryBarrier*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ImageMemoryBarrier const& ) const = default;
+#else
+ bool operator==( ImageMemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
- && ( pWaitSemaphoreDeviceIndices == rhs.pWaitSemaphoreDeviceIndices )
- && ( commandBufferCount == rhs.commandBufferCount )
- && ( pCommandBufferDeviceMasks == rhs.pCommandBufferDeviceMasks )
- && ( signalSemaphoreCount == rhs.signalSemaphoreCount )
- && ( pSignalSemaphoreDeviceIndices == rhs.pSignalSemaphoreDeviceIndices );
+ && ( srcAccessMask == rhs.srcAccessMask )
+ && ( dstAccessMask == rhs.dstAccessMask )
+ && ( oldLayout == rhs.oldLayout )
+ && ( newLayout == rhs.newLayout )
+ && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex )
+ && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex )
+ && ( image == rhs.image )
+ && ( subresourceRange == rhs.subresourceRange );
}
- bool operator!=( DeviceGroupSubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( ImageMemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupSubmitInfo;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryBarrier;
const void* pNext = {};
- uint32_t waitSemaphoreCount = {};
- const uint32_t* pWaitSemaphoreDeviceIndices = {};
- uint32_t commandBufferCount = {};
- const uint32_t* pCommandBufferDeviceMasks = {};
- uint32_t signalSemaphoreCount = {};
- const uint32_t* pSignalSemaphoreDeviceIndices = {};
+ VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
+ VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
+ VULKAN_HPP_NAMESPACE::ImageLayout oldLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+ VULKAN_HPP_NAMESPACE::ImageLayout newLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+ uint32_t srcQueueFamilyIndex = {};
+ uint32_t dstQueueFamilyIndex = {};
+ VULKAN_HPP_NAMESPACE::Image image = {};
+ VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {};
+
};
- static_assert( sizeof( DeviceGroupSubmitInfo ) == sizeof( VkDeviceGroupSubmitInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<DeviceGroupSubmitInfo>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( ImageMemoryBarrier ) == sizeof( VkImageMemoryBarrier ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ImageMemoryBarrier>::value, "struct wrapper is not a standard layout!" );
- struct DeviceGroupSwapchainCreateInfoKHR
+ template <>
+ struct CppType<StructureType, StructureType::eImageMemoryBarrier>
+ {
+ using Type = ImageMemoryBarrier;
+ };
+
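  // Editorial usage sketch (not part of vulkan.hpp or of this diff): an ImageMemoryBarrier
  // describing a layout transition from eUndefined to eTransferDstOptimal before filling an
  // image. The image handle is assumed to come from the caller; srcAccessMask keeps its empty
  // default because the previous contents are discarded, and VK_QUEUE_FAMILY_IGNORED comes
  // from vulkan_core.h.
  inline vk::ImageMemoryBarrier undefinedToTransferDst( vk::Image image )
  {
    return vk::ImageMemoryBarrier()
      .setDstAccessMask( vk::AccessFlagBits::eTransferWrite )
      .setOldLayout( vk::ImageLayout::eUndefined )
      .setNewLayout( vk::ImageLayout::eTransferDstOptimal )
      .setSrcQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
      .setDstQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
      .setImage( image )
      .setSubresourceRange( vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 ) );
  }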
+ class BufferView
{
- VULKAN_HPP_CONSTEXPR DeviceGroupSwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {} ) VULKAN_HPP_NOEXCEPT
- : modes( modes_ )
+ public:
+ using CType = VkBufferView;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eBufferView;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferView;
+
+ public:
+ VULKAN_HPP_CONSTEXPR BufferView() VULKAN_HPP_NOEXCEPT
+ : m_bufferView(VK_NULL_HANDLE)
{}
- VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR & operator=( VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR BufferView( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_bufferView(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT BufferView( VkBufferView bufferView ) VULKAN_HPP_NOEXCEPT
+ : m_bufferView( bufferView )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ BufferView & operator=(VkBufferView bufferView) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR ) - offsetof( DeviceGroupSwapchainCreateInfoKHR, pNext ) );
+ m_bufferView = bufferView;
return *this;
}
+#endif
- DeviceGroupSwapchainCreateInfoKHR( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ BufferView & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ {
+ m_bufferView = VK_NULL_HANDLE;
+ return *this;
+ }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( BufferView const& ) const = default;
+#else
+ bool operator==( BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_bufferView == rhs.m_bufferView;
+ }
+
+ bool operator!=(BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_bufferView != rhs.m_bufferView;
+ }
+
+ bool operator<(BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_bufferView < rhs.m_bufferView;
+ }
+#endif
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBufferView() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_bufferView;
+ }
+
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_bufferView != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_bufferView == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkBufferView m_bufferView;
+ };
+ static_assert( sizeof( VULKAN_HPP_NAMESPACE::BufferView ) == sizeof( VkBufferView ), "handle and wrapper have different size!" );
+
+ template <>
+ struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eBufferView>
+ {
+ using type = VULKAN_HPP_NAMESPACE::BufferView;
+ };
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eBufferView>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::BufferView;
+ };
+
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferView>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::BufferView;
+ };
+
+
+ template <>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::BufferView>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+ };
+
+ struct WriteDescriptorSet
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSet;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR WriteDescriptorSet(VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {}, uint32_t dstBinding_ = {}, uint32_t dstArrayElement_ = {}, uint32_t descriptorCount_ = {}, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, const VULKAN_HPP_NAMESPACE::DescriptorImageInfo* pImageInfo_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo* pBufferInfo_ = {}, const VULKAN_HPP_NAMESPACE::BufferView* pTexelBufferView_ = {}) VULKAN_HPP_NOEXCEPT
+ : dstSet( dstSet_ ), dstBinding( dstBinding_ ), dstArrayElement( dstArrayElement_ ), descriptorCount( descriptorCount_ ), descriptorType( descriptorType_ ), pImageInfo( pImageInfo_ ), pBufferInfo( pBufferInfo_ ), pTexelBufferView( pTexelBufferView_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR WriteDescriptorSet( WriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ WriteDescriptorSet( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- DeviceGroupSwapchainCreateInfoKHR& operator=( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ WriteDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_, uint32_t dstBinding_, uint32_t dstArrayElement_, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorImageInfo> const & imageInfo_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo> const & bufferInfo_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferView> const & texelBufferView_ = {} )
+ : dstSet( dstSet_ ), dstBinding( dstBinding_ ), dstArrayElement( dstArrayElement_ ), descriptorCount( static_cast<uint32_t>( !imageInfo_.empty() ? imageInfo_.size() : !bufferInfo_.empty() ? bufferInfo_.size() : texelBufferView_.size() ) ), descriptorType( descriptorType_ ), pImageInfo( imageInfo_.data() ), pBufferInfo( bufferInfo_.data() ), pTexelBufferView( texelBufferView_.data() )
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR const *>(&rhs);
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+ VULKAN_HPP_ASSERT( ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() ) == 1 );
+#else
+ if ( ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() ) != 1 )
+ {
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::WriteDescriptorSet::WriteDescriptorSet: ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() ) != 1" );
+ }
+#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ WriteDescriptorSet & operator=( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteDescriptorSet const *>( &rhs );
return *this;
}
- DeviceGroupSwapchainCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ WriteDescriptorSet & operator=( WriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( WriteDescriptorSet ) );
+ return *this;
+ }
+
+ WriteDescriptorSet & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- DeviceGroupSwapchainCreateInfoKHR & setModes( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ ) VULKAN_HPP_NOEXCEPT
+ WriteDescriptorSet & setDstSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT
{
- modes = modes_;
+ dstSet = dstSet_;
return *this;
}
- operator VkDeviceGroupSwapchainCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+ WriteDescriptorSet & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkDeviceGroupSwapchainCreateInfoKHR*>( this );
+ dstBinding = dstBinding_;
+ return *this;
}
- operator VkDeviceGroupSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+ WriteDescriptorSet & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkDeviceGroupSwapchainCreateInfoKHR*>( this );
+ dstArrayElement = dstArrayElement_;
+ return *this;
}
- bool operator==( DeviceGroupSwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ WriteDescriptorSet & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ descriptorCount = descriptorCount_;
+ return *this;
+ }
+
+ WriteDescriptorSet & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
+ {
+ descriptorType = descriptorType_;
+ return *this;
+ }
+
+ WriteDescriptorSet & setPImageInfo( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo* pImageInfo_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pImageInfo = pImageInfo_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ WriteDescriptorSet & setImageInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorImageInfo> const & imageInfo_ ) VULKAN_HPP_NOEXCEPT
+ {
+ descriptorCount = static_cast<uint32_t>( imageInfo_.size() );
+ pImageInfo = imageInfo_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+ WriteDescriptorSet & setPBufferInfo( const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo* pBufferInfo_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pBufferInfo = pBufferInfo_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ WriteDescriptorSet & setBufferInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo> const & bufferInfo_ ) VULKAN_HPP_NOEXCEPT
+ {
+ descriptorCount = static_cast<uint32_t>( bufferInfo_.size() );
+ pBufferInfo = bufferInfo_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+ WriteDescriptorSet & setPTexelBufferView( const VULKAN_HPP_NAMESPACE::BufferView* pTexelBufferView_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pTexelBufferView = pTexelBufferView_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ WriteDescriptorSet & setTexelBufferView( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferView> const & texelBufferView_ ) VULKAN_HPP_NOEXCEPT
+ {
+ descriptorCount = static_cast<uint32_t>( texelBufferView_.size() );
+ pTexelBufferView = texelBufferView_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+ operator VkWriteDescriptorSet const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkWriteDescriptorSet*>( this );
+ }
+
+ operator VkWriteDescriptorSet &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkWriteDescriptorSet*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( WriteDescriptorSet const& ) const = default;
+#else
+ bool operator==( WriteDescriptorSet const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( modes == rhs.modes );
+ && ( dstSet == rhs.dstSet )
+ && ( dstBinding == rhs.dstBinding )
+ && ( dstArrayElement == rhs.dstArrayElement )
+ && ( descriptorCount == rhs.descriptorCount )
+ && ( descriptorType == rhs.descriptorType )
+ && ( pImageInfo == rhs.pImageInfo )
+ && ( pBufferInfo == rhs.pBufferInfo )
+ && ( pTexelBufferView == rhs.pTexelBufferView );
}
- bool operator!=( DeviceGroupSwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( WriteDescriptorSet const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupSwapchainCreateInfoKHR;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSet;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes = {};
+ VULKAN_HPP_NAMESPACE::DescriptorSet dstSet = {};
+ uint32_t dstBinding = {};
+ uint32_t dstArrayElement = {};
+ uint32_t descriptorCount = {};
+ VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
+ const VULKAN_HPP_NAMESPACE::DescriptorImageInfo* pImageInfo = {};
+ const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo* pBufferInfo = {};
+ const VULKAN_HPP_NAMESPACE::BufferView* pTexelBufferView = {};
+
};
- static_assert( sizeof( DeviceGroupSwapchainCreateInfoKHR ) == sizeof( VkDeviceGroupSwapchainCreateInfoKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<DeviceGroupSwapchainCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( WriteDescriptorSet ) == sizeof( VkWriteDescriptorSet ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<WriteDescriptorSet>::value, "struct wrapper is not a standard layout!" );
- struct DeviceMemoryOpaqueCaptureAddressInfo
+ template <>
+ struct CppType<StructureType, StructureType::eWriteDescriptorSet>
+ {
+ using Type = WriteDescriptorSet;
+ };
+
+ class DescriptorUpdateTemplate
{
- VULKAN_HPP_CONSTEXPR DeviceMemoryOpaqueCaptureAddressInfo( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {} ) VULKAN_HPP_NOEXCEPT
- : memory( memory_ )
+ public:
+ using CType = VkDescriptorUpdateTemplate;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorUpdateTemplate;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorUpdateTemplate;
+
+ public:
+ VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate() VULKAN_HPP_NOEXCEPT
+ : m_descriptorUpdateTemplate(VK_NULL_HANDLE)
{}
- VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & operator=( VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_descriptorUpdateTemplate(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorUpdateTemplate( VkDescriptorUpdateTemplate descriptorUpdateTemplate ) VULKAN_HPP_NOEXCEPT
+ : m_descriptorUpdateTemplate( descriptorUpdateTemplate )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ DescriptorUpdateTemplate & operator=(VkDescriptorUpdateTemplate descriptorUpdateTemplate) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo ) - offsetof( DeviceMemoryOpaqueCaptureAddressInfo, pNext ) );
+ m_descriptorUpdateTemplate = descriptorUpdateTemplate;
return *this;
}
+#endif
- DeviceMemoryOpaqueCaptureAddressInfo( VkDeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ DescriptorUpdateTemplate & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ {
+ m_descriptorUpdateTemplate = VK_NULL_HANDLE;
+ return *this;
+ }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DescriptorUpdateTemplate const& ) const = default;
+#else
+ bool operator==( DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_descriptorUpdateTemplate == rhs.m_descriptorUpdateTemplate;
+ }
+
+ bool operator!=(DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_descriptorUpdateTemplate != rhs.m_descriptorUpdateTemplate;
+ }
+
+ bool operator<(DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_descriptorUpdateTemplate < rhs.m_descriptorUpdateTemplate;
+ }
+#endif
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorUpdateTemplate() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_descriptorUpdateTemplate;
+ }
+
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_descriptorUpdateTemplate != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_descriptorUpdateTemplate == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkDescriptorUpdateTemplate m_descriptorUpdateTemplate;
+ };
+ static_assert( sizeof( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate ) == sizeof( VkDescriptorUpdateTemplate ), "handle and wrapper have different size!" );
+
+ template <>
+ struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eDescriptorUpdateTemplate>
+ {
+ using type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate;
+ };
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorUpdateTemplate>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate;
+ };
+
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorUpdateTemplate>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate;
+ };
+
+
+ template <>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+ };
+ using DescriptorUpdateTemplateKHR = DescriptorUpdateTemplate;
+
+ class Event
+ {
+ public:
+ using CType = VkEvent;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eEvent;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eEvent;
+
+ public:
+ VULKAN_HPP_CONSTEXPR Event() VULKAN_HPP_NOEXCEPT
+ : m_event(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR Event( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_event(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT Event( VkEvent event ) VULKAN_HPP_NOEXCEPT
+ : m_event( event )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ Event & operator=(VkEvent event) VULKAN_HPP_NOEXCEPT
+ {
+ m_event = event;
+ return *this;
+ }
+#endif
+
+ Event & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ {
+ m_event = VK_NULL_HANDLE;
+ return *this;
+ }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( Event const& ) const = default;
+#else
+ bool operator==( Event const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_event == rhs.m_event;
+ }
+
+ bool operator!=(Event const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_event != rhs.m_event;
+ }
+
+ bool operator<(Event const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_event < rhs.m_event;
+ }
+#endif
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkEvent() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_event;
+ }
+
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_event != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_event == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkEvent m_event;
+ };
+ static_assert( sizeof( VULKAN_HPP_NAMESPACE::Event ) == sizeof( VkEvent ), "handle and wrapper have different size!" );
+
+ template <>
+ struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eEvent>
+ {
+ using type = VULKAN_HPP_NAMESPACE::Event;
+ };
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eEvent>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::Event;
+ };
+
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eEvent>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::Event;
+ };
+
+
+ template <>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Event>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+ };
+
+ struct ImageResolve
+ {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ImageResolve(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}) VULKAN_HPP_NOEXCEPT
+ : srcSubresource( srcSubresource_ ), srcOffset( srcOffset_ ), dstSubresource( dstSubresource_ ), dstOffset( dstOffset_ ), extent( extent_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR ImageResolve( ImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImageResolve( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- DeviceMemoryOpaqueCaptureAddressInfo& operator=( VkDeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ ImageResolve & operator=( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageResolve const *>( &rhs );
return *this;
}
- DeviceMemoryOpaqueCaptureAddressInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ ImageResolve & operator=( ImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageResolve ) );
+ return *this;
+ }
+
+ ImageResolve & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcSubresource = srcSubresource_;
+ return *this;
+ }
+
+ ImageResolve & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcOffset = srcOffset_;
+ return *this;
+ }
+
+ ImageResolve & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstSubresource = dstSubresource_;
+ return *this;
+ }
+
+ ImageResolve & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstOffset = dstOffset_;
+ return *this;
+ }
+
+ ImageResolve & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extent = extent_;
+ return *this;
+ }
+
+
+ operator VkImageResolve const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkImageResolve*>( this );
+ }
+
+ operator VkImageResolve &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkImageResolve*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ImageResolve const& ) const = default;
+#else
+ bool operator==( ImageResolve const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( srcSubresource == rhs.srcSubresource )
+ && ( srcOffset == rhs.srcOffset )
+ && ( dstSubresource == rhs.dstSubresource )
+ && ( dstOffset == rhs.dstOffset )
+ && ( extent == rhs.extent );
+ }
+
+ bool operator!=( ImageResolve const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
+ VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {};
+ VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
+ VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {};
+ VULKAN_HPP_NAMESPACE::Extent3D extent = {};
+
+ };
+ static_assert( sizeof( ImageResolve ) == sizeof( VkImageResolve ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ImageResolve>::value, "struct wrapper is not a standard layout!" );
+
+ struct ImageResolve2KHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageResolve2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ImageResolve2KHR(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}) VULKAN_HPP_NOEXCEPT
+ : srcSubresource( srcSubresource_ ), srcOffset( srcOffset_ ), dstSubresource( dstSubresource_ ), dstOffset( dstOffset_ ), extent( extent_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR ImageResolve2KHR( ImageResolve2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImageResolve2KHR( VkImageResolve2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ ImageResolve2KHR & operator=( VkImageResolve2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageResolve2KHR const *>( &rhs );
+ return *this;
+ }
+
+ ImageResolve2KHR & operator=( ImageResolve2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageResolve2KHR ) );
+ return *this;
+ }
+
+ ImageResolve2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- DeviceMemoryOpaqueCaptureAddressInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+ ImageResolve2KHR & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
{
- memory = memory_;
+ srcSubresource = srcSubresource_;
return *this;
}
- operator VkDeviceMemoryOpaqueCaptureAddressInfo const&() const VULKAN_HPP_NOEXCEPT
+ ImageResolve2KHR & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo*>( this );
+ srcOffset = srcOffset_;
+ return *this;
}
- operator VkDeviceMemoryOpaqueCaptureAddressInfo &() VULKAN_HPP_NOEXCEPT
+ ImageResolve2KHR & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkDeviceMemoryOpaqueCaptureAddressInfo*>( this );
+ dstSubresource = dstSubresource_;
+ return *this;
}
- bool operator==( DeviceMemoryOpaqueCaptureAddressInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ ImageResolve2KHR & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstOffset = dstOffset_;
+ return *this;
+ }
+
+ ImageResolve2KHR & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extent = extent_;
+ return *this;
+ }
+
+
+ operator VkImageResolve2KHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkImageResolve2KHR*>( this );
+ }
+
+ operator VkImageResolve2KHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkImageResolve2KHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ImageResolve2KHR const& ) const = default;
+#else
+ bool operator==( ImageResolve2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( memory == rhs.memory );
+ && ( srcSubresource == rhs.srcSubresource )
+ && ( srcOffset == rhs.srcOffset )
+ && ( dstSubresource == rhs.dstSubresource )
+ && ( dstOffset == rhs.dstOffset )
+ && ( extent == rhs.extent );
}
- bool operator!=( DeviceMemoryOpaqueCaptureAddressInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( ImageResolve2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryOpaqueCaptureAddressInfo;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageResolve2KHR;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+ VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
+ VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {};
+ VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
+ VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {};
+ VULKAN_HPP_NAMESPACE::Extent3D extent = {};
+
};
- static_assert( sizeof( DeviceMemoryOpaqueCaptureAddressInfo ) == sizeof( VkDeviceMemoryOpaqueCaptureAddressInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<DeviceMemoryOpaqueCaptureAddressInfo>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( ImageResolve2KHR ) == sizeof( VkImageResolve2KHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ImageResolve2KHR>::value, "struct wrapper is not a standard layout!" );
- struct DeviceMemoryOverallocationCreateInfoAMD
+ template <>
+ struct CppType<StructureType, StructureType::eImageResolve2KHR>
{
- VULKAN_HPP_CONSTEXPR DeviceMemoryOverallocationCreateInfoAMD( VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ = VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault ) VULKAN_HPP_NOEXCEPT
- : overallocationBehavior( overallocationBehavior_ )
+ using Type = ImageResolve2KHR;
+ };
+
+ struct ResolveImageInfo2KHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eResolveImageInfo2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ResolveImageInfo2KHR(VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageResolve2KHR* pRegions_ = {}) VULKAN_HPP_NOEXCEPT
+ : srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( regionCount_ ), pRegions( pRegions_ )
{}
- VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD & operator=( VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ResolveImageInfo2KHR( ResolveImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ResolveImageInfo2KHR( VkResolveImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD ) - offsetof( DeviceMemoryOverallocationCreateInfoAMD, pNext ) );
- return *this;
+ *this = rhs;
}
- DeviceMemoryOverallocationCreateInfoAMD( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ ResolveImageInfo2KHR( VULKAN_HPP_NAMESPACE::Image srcImage_, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, VULKAN_HPP_NAMESPACE::Image dstImage_, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageResolve2KHR> const & regions_ )
+ : srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ ResolveImageInfo2KHR & operator=( VkResolveImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ResolveImageInfo2KHR const *>( &rhs );
+ return *this;
}
- DeviceMemoryOverallocationCreateInfoAMD& operator=( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+ ResolveImageInfo2KHR & operator=( ResolveImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ResolveImageInfo2KHR ) );
return *this;
}
- DeviceMemoryOverallocationCreateInfoAMD & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ ResolveImageInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- DeviceMemoryOverallocationCreateInfoAMD & setOverallocationBehavior( VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ ) VULKAN_HPP_NOEXCEPT
+ ResolveImageInfo2KHR & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT
{
- overallocationBehavior = overallocationBehavior_;
+ srcImage = srcImage_;
return *this;
}
- operator VkDeviceMemoryOverallocationCreateInfoAMD const&() const VULKAN_HPP_NOEXCEPT
+ ResolveImageInfo2KHR & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkDeviceMemoryOverallocationCreateInfoAMD*>( this );
+ srcImageLayout = srcImageLayout_;
+ return *this;
}
- operator VkDeviceMemoryOverallocationCreateInfoAMD &() VULKAN_HPP_NOEXCEPT
+ ResolveImageInfo2KHR & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkDeviceMemoryOverallocationCreateInfoAMD*>( this );
+ dstImage = dstImage_;
+ return *this;
}
- bool operator==( DeviceMemoryOverallocationCreateInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+ ResolveImageInfo2KHR & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstImageLayout = dstImageLayout_;
+ return *this;
+ }
+
+ ResolveImageInfo2KHR & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ regionCount = regionCount_;
+ return *this;
+ }
+
+ ResolveImageInfo2KHR & setPRegions( const VULKAN_HPP_NAMESPACE::ImageResolve2KHR* pRegions_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pRegions = pRegions_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ ResolveImageInfo2KHR & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageResolve2KHR> const & regions_ ) VULKAN_HPP_NOEXCEPT
+ {
+ regionCount = static_cast<uint32_t>( regions_.size() );
+ pRegions = regions_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+ operator VkResolveImageInfo2KHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkResolveImageInfo2KHR*>( this );
+ }
+
+ operator VkResolveImageInfo2KHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkResolveImageInfo2KHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ResolveImageInfo2KHR const& ) const = default;
+#else
+ bool operator==( ResolveImageInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( overallocationBehavior == rhs.overallocationBehavior );
+ && ( srcImage == rhs.srcImage )
+ && ( srcImageLayout == rhs.srcImageLayout )
+ && ( dstImage == rhs.dstImage )
+ && ( dstImageLayout == rhs.dstImageLayout )
+ && ( regionCount == rhs.regionCount )
+ && ( pRegions == rhs.pRegions );
}
- bool operator!=( DeviceMemoryOverallocationCreateInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( ResolveImageInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryOverallocationCreateInfoAMD;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eResolveImageInfo2KHR;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior = VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault;
+ VULKAN_HPP_NAMESPACE::Image srcImage = {};
+ VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+ VULKAN_HPP_NAMESPACE::Image dstImage = {};
+ VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+ uint32_t regionCount = {};
+ const VULKAN_HPP_NAMESPACE::ImageResolve2KHR* pRegions = {};
+
};
- static_assert( sizeof( DeviceMemoryOverallocationCreateInfoAMD ) == sizeof( VkDeviceMemoryOverallocationCreateInfoAMD ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<DeviceMemoryOverallocationCreateInfoAMD>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( ResolveImageInfo2KHR ) == sizeof( VkResolveImageInfo2KHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ResolveImageInfo2KHR>::value, "struct wrapper is not a standard layout!" );
- struct DeviceQueueGlobalPriorityCreateInfoEXT
+ template <>
+ struct CppType<StructureType, StructureType::eResolveImageInfo2KHR>
+ {
+ using Type = ResolveImageInfo2KHR;
+ };
+
+ struct PerformanceMarkerInfoINTEL
{
- VULKAN_HPP_CONSTEXPR DeviceQueueGlobalPriorityCreateInfoEXT( VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT globalPriority_ = VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow ) VULKAN_HPP_NOEXCEPT
- : globalPriority( globalPriority_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceMarkerInfoINTEL;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL(uint64_t marker_ = {}) VULKAN_HPP_NOEXCEPT
+ : marker( marker_ )
{}
- VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL( PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PerformanceMarkerInfoINTEL( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoEXT ) - offsetof( DeviceQueueGlobalPriorityCreateInfoEXT, pNext ) );
- return *this;
+ *this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- DeviceQueueGlobalPriorityCreateInfoEXT( VkDeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PerformanceMarkerInfoINTEL & operator=( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL const *>( &rhs );
+ return *this;
}
- DeviceQueueGlobalPriorityCreateInfoEXT& operator=( VkDeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PerformanceMarkerInfoINTEL & operator=( PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoEXT const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PerformanceMarkerInfoINTEL ) );
return *this;
}
- DeviceQueueGlobalPriorityCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ PerformanceMarkerInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- DeviceQueueGlobalPriorityCreateInfoEXT & setGlobalPriority( VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT globalPriority_ ) VULKAN_HPP_NOEXCEPT
+ PerformanceMarkerInfoINTEL & setMarker( uint64_t marker_ ) VULKAN_HPP_NOEXCEPT
{
- globalPriority = globalPriority_;
+ marker = marker_;
return *this;
}
- operator VkDeviceQueueGlobalPriorityCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkPerformanceMarkerInfoINTEL const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkDeviceQueueGlobalPriorityCreateInfoEXT*>( this );
+ return *reinterpret_cast<const VkPerformanceMarkerInfoINTEL*>( this );
}
- operator VkDeviceQueueGlobalPriorityCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPerformanceMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkDeviceQueueGlobalPriorityCreateInfoEXT*>( this );
+ return *reinterpret_cast<VkPerformanceMarkerInfoINTEL*>( this );
}
- bool operator==( DeviceQueueGlobalPriorityCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PerformanceMarkerInfoINTEL const& ) const = default;
+#else
+ bool operator==( PerformanceMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( globalPriority == rhs.globalPriority );
+ && ( marker == rhs.marker );
}
- bool operator!=( DeviceQueueGlobalPriorityCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PerformanceMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueGlobalPriorityCreateInfoEXT;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceMarkerInfoINTEL;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT globalPriority = VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow;
+ uint64_t marker = {};
+
};
- static_assert( sizeof( DeviceQueueGlobalPriorityCreateInfoEXT ) == sizeof( VkDeviceQueueGlobalPriorityCreateInfoEXT ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<DeviceQueueGlobalPriorityCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( PerformanceMarkerInfoINTEL ) == sizeof( VkPerformanceMarkerInfoINTEL ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PerformanceMarkerInfoINTEL>::value, "struct wrapper is not a standard layout!" );
- struct DeviceQueueInfo2
+ template <>
+ struct CppType<StructureType, StructureType::ePerformanceMarkerInfoINTEL>
+ {
+ using Type = PerformanceMarkerInfoINTEL;
+ };
+
+ struct PerformanceOverrideInfoINTEL
{
- VULKAN_HPP_CONSTEXPR DeviceQueueInfo2( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {},
- uint32_t queueFamilyIndex_ = {},
- uint32_t queueIndex_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , queueFamilyIndex( queueFamilyIndex_ )
- , queueIndex( queueIndex_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceOverrideInfoINTEL;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PerformanceOverrideInfoINTEL(VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware, VULKAN_HPP_NAMESPACE::Bool32 enable_ = {}, uint64_t parameter_ = {}) VULKAN_HPP_NOEXCEPT
+ : type( type_ ), enable( enable_ ), parameter( parameter_ )
{}
- VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 & operator=( VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR PerformanceOverrideInfoINTEL( PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PerformanceOverrideInfoINTEL( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 ) - offsetof( DeviceQueueInfo2, pNext ) );
- return *this;
+ *this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- DeviceQueueInfo2( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ PerformanceOverrideInfoINTEL & operator=( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL const *>( &rhs );
+ return *this;
}
- DeviceQueueInfo2& operator=( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ PerformanceOverrideInfoINTEL & operator=( PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PerformanceOverrideInfoINTEL ) );
return *this;
}
- DeviceQueueInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ PerformanceOverrideInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- DeviceQueueInfo2 & setFlags( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ PerformanceOverrideInfoINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT
{
- flags = flags_;
+ type = type_;
return *this;
}
- DeviceQueueInfo2 & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+ PerformanceOverrideInfoINTEL & setEnable( VULKAN_HPP_NAMESPACE::Bool32 enable_ ) VULKAN_HPP_NOEXCEPT
{
- queueFamilyIndex = queueFamilyIndex_;
+ enable = enable_;
return *this;
}
- DeviceQueueInfo2 & setQueueIndex( uint32_t queueIndex_ ) VULKAN_HPP_NOEXCEPT
+ PerformanceOverrideInfoINTEL & setParameter( uint64_t parameter_ ) VULKAN_HPP_NOEXCEPT
{
- queueIndex = queueIndex_;
+ parameter = parameter_;
return *this;
}
- operator VkDeviceQueueInfo2 const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkPerformanceOverrideInfoINTEL const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkDeviceQueueInfo2*>( this );
+ return *reinterpret_cast<const VkPerformanceOverrideInfoINTEL*>( this );
}
- operator VkDeviceQueueInfo2 &() VULKAN_HPP_NOEXCEPT
+ operator VkPerformanceOverrideInfoINTEL &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkDeviceQueueInfo2*>( this );
+ return *reinterpret_cast<VkPerformanceOverrideInfoINTEL*>( this );
}
- bool operator==( DeviceQueueInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PerformanceOverrideInfoINTEL const& ) const = default;
+#else
+ bool operator==( PerformanceOverrideInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( flags == rhs.flags )
- && ( queueFamilyIndex == rhs.queueFamilyIndex )
- && ( queueIndex == rhs.queueIndex );
+ && ( type == rhs.type )
+ && ( enable == rhs.enable )
+ && ( parameter == rhs.parameter );
}
- bool operator!=( DeviceQueueInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PerformanceOverrideInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueInfo2;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceOverrideInfoINTEL;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags = {};
- uint32_t queueFamilyIndex = {};
- uint32_t queueIndex = {};
+ VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware;
+ VULKAN_HPP_NAMESPACE::Bool32 enable = {};
+ uint64_t parameter = {};
+
};
- static_assert( sizeof( DeviceQueueInfo2 ) == sizeof( VkDeviceQueueInfo2 ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<DeviceQueueInfo2>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( PerformanceOverrideInfoINTEL ) == sizeof( VkPerformanceOverrideInfoINTEL ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PerformanceOverrideInfoINTEL>::value, "struct wrapper is not a standard layout!" );
- struct DispatchIndirectCommand
+ template <>
+ struct CppType<StructureType, StructureType::ePerformanceOverrideInfoINTEL>
{
- VULKAN_HPP_CONSTEXPR DispatchIndirectCommand( uint32_t x_ = {},
- uint32_t y_ = {},
- uint32_t z_ = {} ) VULKAN_HPP_NOEXCEPT
- : x( x_ )
- , y( y_ )
- , z( z_ )
+ using Type = PerformanceOverrideInfoINTEL;
+ };
+
+ struct PerformanceStreamMarkerInfoINTEL
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceStreamMarkerInfoINTEL;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL(uint32_t marker_ = {}) VULKAN_HPP_NOEXCEPT
+ : marker( marker_ )
{}
- DispatchIndirectCommand( VkDispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL( PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PerformanceStreamMarkerInfoINTEL( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- DispatchIndirectCommand& operator=( VkDispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
+ PerformanceStreamMarkerInfoINTEL & operator=( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DispatchIndirectCommand const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL const *>( &rhs );
return *this;
}
- DispatchIndirectCommand & setX( uint32_t x_ ) VULKAN_HPP_NOEXCEPT
+ PerformanceStreamMarkerInfoINTEL & operator=( PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PerformanceStreamMarkerInfoINTEL ) );
+ return *this;
+ }
+
+ PerformanceStreamMarkerInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PerformanceStreamMarkerInfoINTEL & setMarker( uint32_t marker_ ) VULKAN_HPP_NOEXCEPT
+ {
+ marker = marker_;
+ return *this;
+ }
+
+
+ operator VkPerformanceStreamMarkerInfoINTEL const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL*>( this );
+ }
+
+ operator VkPerformanceStreamMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPerformanceStreamMarkerInfoINTEL*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PerformanceStreamMarkerInfoINTEL const& ) const = default;
+#else
+ bool operator==( PerformanceStreamMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( marker == rhs.marker );
+ }
+
+ bool operator!=( PerformanceStreamMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceStreamMarkerInfoINTEL;
+ const void* pNext = {};
+ uint32_t marker = {};
+
+ };
+ static_assert( sizeof( PerformanceStreamMarkerInfoINTEL ) == sizeof( VkPerformanceStreamMarkerInfoINTEL ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PerformanceStreamMarkerInfoINTEL>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::ePerformanceStreamMarkerInfoINTEL>
+ {
+ using Type = PerformanceStreamMarkerInfoINTEL;
+ };
+
+ struct Viewport
+ {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR Viewport(float x_ = {}, float y_ = {}, float width_ = {}, float height_ = {}, float minDepth_ = {}, float maxDepth_ = {}) VULKAN_HPP_NOEXCEPT
+ : x( x_ ), y( y_ ), width( width_ ), height( height_ ), minDepth( minDepth_ ), maxDepth( maxDepth_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR Viewport( Viewport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ Viewport( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ Viewport & operator=( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Viewport const *>( &rhs );
+ return *this;
+ }
+
+ Viewport & operator=( Viewport const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( Viewport ) );
+ return *this;
+ }
+
+ Viewport & setX( float x_ ) VULKAN_HPP_NOEXCEPT
{
x = x_;
return *this;
}
- DispatchIndirectCommand & setY( uint32_t y_ ) VULKAN_HPP_NOEXCEPT
+ Viewport & setY( float y_ ) VULKAN_HPP_NOEXCEPT
{
y = y_;
return *this;
}
- DispatchIndirectCommand & setZ( uint32_t z_ ) VULKAN_HPP_NOEXCEPT
+ Viewport & setWidth( float width_ ) VULKAN_HPP_NOEXCEPT
{
- z = z_;
+ width = width_;
return *this;
}
- operator VkDispatchIndirectCommand const&() const VULKAN_HPP_NOEXCEPT
+ Viewport & setHeight( float height_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkDispatchIndirectCommand*>( this );
+ height = height_;
+ return *this;
}
- operator VkDispatchIndirectCommand &() VULKAN_HPP_NOEXCEPT
+ Viewport & setMinDepth( float minDepth_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkDispatchIndirectCommand*>( this );
+ minDepth = minDepth_;
+ return *this;
}
- bool operator==( DispatchIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT
+ Viewport & setMaxDepth( float maxDepth_ ) VULKAN_HPP_NOEXCEPT
+ {
+ maxDepth = maxDepth_;
+ return *this;
+ }
+
+
+ operator VkViewport const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkViewport*>( this );
+ }
+
+ operator VkViewport &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkViewport*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( Viewport const& ) const = default;
+#else
+ bool operator==( Viewport const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( x == rhs.x )
&& ( y == rhs.y )
- && ( z == rhs.z );
+ && ( width == rhs.width )
+ && ( height == rhs.height )
+ && ( minDepth == rhs.minDepth )
+ && ( maxDepth == rhs.maxDepth );
}
- bool operator!=( DispatchIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( Viewport const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- uint32_t x = {};
- uint32_t y = {};
- uint32_t z = {};
+ float x = {};
+ float y = {};
+ float width = {};
+ float height = {};
+ float minDepth = {};
+ float maxDepth = {};
+
};
- static_assert( sizeof( DispatchIndirectCommand ) == sizeof( VkDispatchIndirectCommand ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<DispatchIndirectCommand>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( Viewport ) == sizeof( VkViewport ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<Viewport>::value, "struct wrapper is not a standard layout!" );
- struct DisplayEventInfoEXT
+ struct ShadingRatePaletteNV
{
- VULKAN_HPP_CONSTEXPR DisplayEventInfoEXT( VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ = VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut ) VULKAN_HPP_NOEXCEPT
- : displayEvent( displayEvent_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV(uint32_t shadingRatePaletteEntryCount_ = {}, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV* pShadingRatePaletteEntries_ = {}) VULKAN_HPP_NOEXCEPT
+ : shadingRatePaletteEntryCount( shadingRatePaletteEntryCount_ ), pShadingRatePaletteEntries( pShadingRatePaletteEntries_ )
{}
- VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & operator=( VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV( ShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ShadingRatePaletteNV( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT ) - offsetof( DisplayEventInfoEXT, pNext ) );
+ *this = rhs;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ ShadingRatePaletteNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV> const & shadingRatePaletteEntries_ )
+ : shadingRatePaletteEntryCount( static_cast<uint32_t>( shadingRatePaletteEntries_.size() ) ), pShadingRatePaletteEntries( shadingRatePaletteEntries_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ ShadingRatePaletteNV & operator=( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV const *>( &rhs );
return *this;
}
- DisplayEventInfoEXT( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ ShadingRatePaletteNV & operator=( ShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ShadingRatePaletteNV ) );
+ return *this;
}
- DisplayEventInfoEXT& operator=( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ ShadingRatePaletteNV & setShadingRatePaletteEntryCount( uint32_t shadingRatePaletteEntryCount_ ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const *>(&rhs);
+ shadingRatePaletteEntryCount = shadingRatePaletteEntryCount_;
return *this;
}
- DisplayEventInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ ShadingRatePaletteNV & setPShadingRatePaletteEntries( const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV* pShadingRatePaletteEntries_ ) VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ pShadingRatePaletteEntries = pShadingRatePaletteEntries_;
return *this;
}
- DisplayEventInfoEXT & setDisplayEvent( VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ ShadingRatePaletteNV & setShadingRatePaletteEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV> const & shadingRatePaletteEntries_ ) VULKAN_HPP_NOEXCEPT
{
- displayEvent = displayEvent_;
+ shadingRatePaletteEntryCount = static_cast<uint32_t>( shadingRatePaletteEntries_.size() );
+ pShadingRatePaletteEntries = shadingRatePaletteEntries_.data();
return *this;
}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
- operator VkDisplayEventInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkShadingRatePaletteNV const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkDisplayEventInfoEXT*>( this );
+ return *reinterpret_cast<const VkShadingRatePaletteNV*>( this );
}
- operator VkDisplayEventInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkShadingRatePaletteNV &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkDisplayEventInfoEXT*>( this );
+ return *reinterpret_cast<VkShadingRatePaletteNV*>( this );
}
- bool operator==( DisplayEventInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ShadingRatePaletteNV const& ) const = default;
+#else
+ bool operator==( ShadingRatePaletteNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( displayEvent == rhs.displayEvent );
+ return ( shadingRatePaletteEntryCount == rhs.shadingRatePaletteEntryCount )
+ && ( pShadingRatePaletteEntries == rhs.pShadingRatePaletteEntries );
}
- bool operator!=( DisplayEventInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( ShadingRatePaletteNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayEventInfoEXT;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent = VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut;
+ uint32_t shadingRatePaletteEntryCount = {};
+ const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV* pShadingRatePaletteEntries = {};
+
};
- static_assert( sizeof( DisplayEventInfoEXT ) == sizeof( VkDisplayEventInfoEXT ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<DisplayEventInfoEXT>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( ShadingRatePaletteNV ) == sizeof( VkShadingRatePaletteNV ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ShadingRatePaletteNV>::value, "struct wrapper is not a standard layout!" );
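  // Illustrative usage sketch (not part of vulkan.hpp; assumes the default "vk" namespace and
  // that enhanced mode is enabled). The ShadingRatePaletteEntryNV enumerators come from
  // VK_NV_shading_rate_image; the ArrayProxy setter shown above fills count and pointer in one call:
  //
  //   std::array<vk::ShadingRatePaletteEntryNV, 2> entries = {
  //     vk::ShadingRatePaletteEntryNV::eNoInvocations,
  //     vk::ShadingRatePaletteEntryNV::e1InvocationPerPixel
  //   };
  //   vk::ShadingRatePaletteNV palette;
  //   palette.setShadingRatePaletteEntries( entries );
  //   // equivalent raw form:
  //   // palette.setShadingRatePaletteEntryCount( static_cast<uint32_t>( entries.size() ) )
  //   //        .setPShadingRatePaletteEntries( entries.data() );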
- struct DisplayModeParametersKHR
+ struct ViewportWScalingNV
{
- VULKAN_HPP_CONSTEXPR DisplayModeParametersKHR( VULKAN_HPP_NAMESPACE::Extent2D visibleRegion_ = {},
- uint32_t refreshRate_ = {} ) VULKAN_HPP_NOEXCEPT
- : visibleRegion( visibleRegion_ )
- , refreshRate( refreshRate_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ViewportWScalingNV(float xcoeff_ = {}, float ycoeff_ = {}) VULKAN_HPP_NOEXCEPT
+ : xcoeff( xcoeff_ ), ycoeff( ycoeff_ )
{}
- DisplayModeParametersKHR( VkDisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ViewportWScalingNV( ViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ViewportWScalingNV( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- DisplayModeParametersKHR& operator=( VkDisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ ViewportWScalingNV & operator=( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ViewportWScalingNV const *>( &rhs );
return *this;
}
- DisplayModeParametersKHR & setVisibleRegion( VULKAN_HPP_NAMESPACE::Extent2D visibleRegion_ ) VULKAN_HPP_NOEXCEPT
+ ViewportWScalingNV & operator=( ViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- visibleRegion = visibleRegion_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ViewportWScalingNV ) );
return *this;
}
- DisplayModeParametersKHR & setRefreshRate( uint32_t refreshRate_ ) VULKAN_HPP_NOEXCEPT
+ ViewportWScalingNV & setXcoeff( float xcoeff_ ) VULKAN_HPP_NOEXCEPT
{
- refreshRate = refreshRate_;
+ xcoeff = xcoeff_;
return *this;
}
- operator VkDisplayModeParametersKHR const&() const VULKAN_HPP_NOEXCEPT
+ ViewportWScalingNV & setYcoeff( float ycoeff_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkDisplayModeParametersKHR*>( this );
+ ycoeff = ycoeff_;
+ return *this;
}
- operator VkDisplayModeParametersKHR &() VULKAN_HPP_NOEXCEPT
+
+ operator VkViewportWScalingNV const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkDisplayModeParametersKHR*>( this );
+ return *reinterpret_cast<const VkViewportWScalingNV*>( this );
}
- bool operator==( DisplayModeParametersKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ operator VkViewportWScalingNV &() VULKAN_HPP_NOEXCEPT
{
- return ( visibleRegion == rhs.visibleRegion )
- && ( refreshRate == rhs.refreshRate );
+ return *reinterpret_cast<VkViewportWScalingNV*>( this );
}
- bool operator!=( DisplayModeParametersKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ViewportWScalingNV const& ) const = default;
+#else
+ bool operator==( ViewportWScalingNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( xcoeff == rhs.xcoeff )
+ && ( ycoeff == rhs.ycoeff );
+ }
+
+ bool operator!=( ViewportWScalingNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- VULKAN_HPP_NAMESPACE::Extent2D visibleRegion = {};
- uint32_t refreshRate = {};
+ float xcoeff = {};
+ float ycoeff = {};
+
};
- static_assert( sizeof( DisplayModeParametersKHR ) == sizeof( VkDisplayModeParametersKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<DisplayModeParametersKHR>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( ViewportWScalingNV ) == sizeof( VkViewportWScalingNV ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ViewportWScalingNV>::value, "struct wrapper is not a standard layout!" );
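  // Illustrative usage sketch (not part of vulkan.hpp). ViewportWScalingNV is the per-viewport
  // coefficient pair used by VK_NV_clip_space_w_scaling; with struct constructors enabled
  // (VULKAN_HPP_NO_STRUCT_CONSTRUCTORS not defined) it can be built directly:
  //
  //   std::array<vk::ViewportWScalingNV, 2> wScalings = {
  //     vk::ViewportWScalingNV( 0.5f, 0.5f ),    // e.g. left view
  //     vk::ViewportWScalingNV( -0.5f, 0.5f )    // e.g. right view
  //   };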
- struct DisplayModeCreateInfoKHR
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ struct StridedBufferRegionKHR
{
- VULKAN_HPP_CONSTEXPR DisplayModeCreateInfoKHR( VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_ = {},
- VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , parameters( parameters_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR StridedBufferRegionKHR(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize stride_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}) VULKAN_HPP_NOEXCEPT
+ : buffer( buffer_ ), offset( offset_ ), stride( stride_ ), size( size_ )
{}
- VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR & operator=( VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR StridedBufferRegionKHR( StridedBufferRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ StridedBufferRegionKHR( VkStridedBufferRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR ) - offsetof( DisplayModeCreateInfoKHR, pNext ) );
+ *this = rhs;
+ }
+
+ explicit StridedBufferRegionKHR( IndirectCommandsStreamNV const& indirectCommandsStreamNV, VULKAN_HPP_NAMESPACE::DeviceSize stride_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} )
+ : buffer( indirectCommandsStreamNV.buffer )
+ , offset( indirectCommandsStreamNV.offset )
+ , stride( stride_ )
+ , size( size_ )
+ {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ StridedBufferRegionKHR & operator=( VkStridedBufferRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR const *>( &rhs );
return *this;
}
- DisplayModeCreateInfoKHR( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ StridedBufferRegionKHR & operator=( StridedBufferRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( StridedBufferRegionKHR ) );
+ return *this;
}
- DisplayModeCreateInfoKHR& operator=( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ StridedBufferRegionKHR & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const *>(&rhs);
+ buffer = buffer_;
return *this;
}
- DisplayModeCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ StridedBufferRegionKHR & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ offset = offset_;
return *this;
}
- DisplayModeCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+ StridedBufferRegionKHR & setStride( VULKAN_HPP_NAMESPACE::DeviceSize stride_ ) VULKAN_HPP_NOEXCEPT
{
- flags = flags_;
+ stride = stride_;
return *this;
}
- DisplayModeCreateInfoKHR & setParameters( VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ ) VULKAN_HPP_NOEXCEPT
+ StridedBufferRegionKHR & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
{
- parameters = parameters_;
+ size = size_;
return *this;
}
- operator VkDisplayModeCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkStridedBufferRegionKHR const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkDisplayModeCreateInfoKHR*>( this );
+ return *reinterpret_cast<const VkStridedBufferRegionKHR*>( this );
}
- operator VkDisplayModeCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkStridedBufferRegionKHR &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkDisplayModeCreateInfoKHR*>( this );
+ return *reinterpret_cast<VkStridedBufferRegionKHR*>( this );
}
- bool operator==( DisplayModeCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( StridedBufferRegionKHR const& ) const = default;
+#else
+ bool operator==( StridedBufferRegionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( flags == rhs.flags )
- && ( parameters == rhs.parameters );
+ return ( buffer == rhs.buffer )
+ && ( offset == rhs.offset )
+ && ( stride == rhs.stride )
+ && ( size == rhs.size );
}
- bool operator!=( DisplayModeCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( StridedBufferRegionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayModeCreateInfoKHR;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags = {};
- VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters = {};
+ VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize stride = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+
};
- static_assert( sizeof( DisplayModeCreateInfoKHR ) == sizeof( VkDisplayModeCreateInfoKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<DisplayModeCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( StridedBufferRegionKHR ) == sizeof( VkStridedBufferRegionKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<StridedBufferRegionKHR>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
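  // Illustrative usage sketch (not part of vulkan.hpp; provisional, requires
  // VK_ENABLE_BETA_EXTENSIONS). A StridedBufferRegionKHR describes a buffer sub-range plus a
  // stride, e.g. one shader-binding-table region. "sbtBuffer" and "handleSizeAligned" are
  // placeholders for an existing vk::Buffer and an aligned shader-group-handle size:
  //
  //   vk::StridedBufferRegionKHR raygenRegion( sbtBuffer, 0 /*offset*/,
  //                                            handleSizeAligned /*stride*/,
  //                                            handleSizeAligned /*size*/ );
  //   // or via the setters shown above:
  //   // raygenRegion.setBuffer( sbtBuffer ).setOffset( 0 )
  //   //             .setStride( handleSizeAligned ).setSize( handleSizeAligned );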
- struct DisplayModePropertiesKHR
+ class CommandBuffer
{
- DisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ = {},
- VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = {} ) VULKAN_HPP_NOEXCEPT
- : displayMode( displayMode_ )
- , parameters( parameters_ )
+ public:
+ using CType = VkCommandBuffer;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCommandBuffer;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandBuffer;
+
+ public:
+ VULKAN_HPP_CONSTEXPR CommandBuffer() VULKAN_HPP_NOEXCEPT
+ : m_commandBuffer(VK_NULL_HANDLE)
{}
- DisplayModePropertiesKHR( VkDisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR CommandBuffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_commandBuffer(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT CommandBuffer( VkCommandBuffer commandBuffer ) VULKAN_HPP_NOEXCEPT
+ : m_commandBuffer( commandBuffer )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ CommandBuffer & operator=(VkCommandBuffer commandBuffer) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ m_commandBuffer = commandBuffer;
+ return *this;
}
+#endif
- DisplayModePropertiesKHR& operator=( VkDisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ CommandBuffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR const *>(&rhs);
+ m_commandBuffer = VK_NULL_HANDLE;
return *this;
}
- operator VkDisplayModePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( CommandBuffer const& ) const = default;
+#else
+ bool operator==( CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkDisplayModePropertiesKHR*>( this );
+ return m_commandBuffer == rhs.m_commandBuffer;
}
- operator VkDisplayModePropertiesKHR &() VULKAN_HPP_NOEXCEPT
+ bool operator!=(CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkDisplayModePropertiesKHR*>( this );
+ return m_commandBuffer != rhs.m_commandBuffer;
}
- bool operator==( DisplayModePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator<(CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- return ( displayMode == rhs.displayMode )
- && ( parameters == rhs.parameters );
+ return m_commandBuffer < rhs.m_commandBuffer;
}
+#endif
- bool operator!=( DisplayModePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo* pBeginInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type begin( const CommandBufferBeginInfo & beginInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void beginConditionalRenderingEXT( const ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, uint32_t index, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, uint32_t index, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void beginRenderPass( const RenderPassBeginInfo & renderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void beginRenderPass2( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfo & subpassBeginInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void beginRenderPass2KHR( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfo & subpassBeginInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const &counterBuffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const &counterBufferOffsets, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const &descriptorSets, ArrayProxy<const uint32_t> const &dynamicOffsets, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::IndexType indexType, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::IndexType indexType, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t groupIndex, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t groupIndex, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize* pSizes, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const &buffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const &offsets, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const &sizes, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void bindVertexBuffers( uint32_t firstBinding, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const &buffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const &offsets, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
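    // Illustrative usage sketch (not part of vulkan.hpp). Most CommandBuffer members come in two
    // flavours: a raw count/pointer overload and, unless VULKAN_HPP_DISABLE_ENHANCED_MODE is
    // defined, an ArrayProxy overload. "cmd" and "vertexBuffer" are placeholder handles:
    //
    //   std::array<vk::Buffer, 1>     buffers = { vertexBuffer };
    //   std::array<vk::DeviceSize, 1> offsets = { 0 };
    //   cmd.bindVertexBuffers( 0, buffers, offsets );                   // enhanced ArrayProxy form
    //   cmd.bindVertexBuffers( 0, 1, buffers.data(), offsets.data() );  // raw count/pointer form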
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void bindVertexBuffers2EXT( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize* pSizes, const VULKAN_HPP_NAMESPACE::DeviceSize* pStrides, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void bindVertexBuffers2EXT( uint32_t firstBinding, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const &buffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const &offsets, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const &sizes, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const &strides, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageBlit* pRegions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> const &regions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2KHR* pBlitImageInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void blitImage2KHR( const BlitImageInfo2KHR & blitImageInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void buildAccelerationStructureIndirectKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfo, VULKAN_HPP_NAMESPACE::Buffer indirectBuffer, VULKAN_HPP_NAMESPACE::DeviceSize indirectOffset, uint32_t indirectStride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void buildAccelerationStructureIndirectKHR( const AccelerationStructureBuildGeometryInfoKHR & info, VULKAN_HPP_NAMESPACE::Buffer indirectBuffer, VULKAN_HPP_NAMESPACE::DeviceSize indirectOffset, uint32_t indirectStride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void buildAccelerationStructureKHR( uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfos, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildOffsetInfoKHR* const * ppOffsetInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void buildAccelerationStructureKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const &infos, ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildOffsetInfoKHR* const > const &pOffsetInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV* pInfo, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void buildAccelerationStructureNV( const AccelerationStructureInfoNV & info, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void clearAttachments( uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::ClearAttachment* pAttachments, uint32_t rectCount, const VULKAN_HPP_NAMESPACE::ClearRect* pRects, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void clearAttachments( ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> const &attachments, ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> const &rects, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearColorValue* pColor, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const &ranges, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const &ranges, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR* pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyAccelerationStructureKHR( const CopyAccelerationStructureInfoKHR & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR* pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyAccelerationStructureToMemoryKHR( const CopyAccelerationStructureToMemoryInfoKHR & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferCopy* pRegions, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> const &regions, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2KHR* pCopyBufferInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyBuffer2KHR( const CopyBufferInfo2KHR & copyBufferInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const &regions, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2KHR* pCopyBufferToImageInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyBufferToImage2KHR( const CopyBufferToImageInfo2KHR & copyBufferToImageInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageCopy* pRegions, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> const &regions, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2KHR* pCopyImageInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyImage2KHR( const CopyImageInfo2KHR & copyImageInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const &regions, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2KHR* pCopyImageToBufferInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyImageToBuffer2KHR( const CopyImageToBufferInfo2KHR & copyImageToBufferInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR* pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyMemoryToAccelerationStructureKHR( const CopyMemoryToAccelerationStructureInfoKHR & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void debugMarkerEndEXT(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void debugMarkerEndEXT(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, VULKAN_HPP_NAMESPACE::Buffer counterBuffer, VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, VULKAN_HPP_NAMESPACE::Buffer counterBuffer, VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void endConditionalRenderingEXT(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void endConditionalRenderingEXT(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void endDebugUtilsLabelEXT(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void endDebugUtilsLabelEXT(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void endRenderPass(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void endRenderPass(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void endRenderPass2( const SubpassEndInfo & subpassEndInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void endRenderPass2KHR( const SubpassEndInfo & subpassEndInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void endTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void endTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const &counterBuffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const &counterBufferOffsets, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void executeCommands( uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void executeCommands( ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const &commandBuffers, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV* pGeneratedCommandsInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const GeneratedCommandsInfoNV & generatedCommandsInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void nextSubpass2( const SubpassBeginInfo & subpassBeginInfo, const SubpassEndInfo & subpassEndInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void nextSubpass2KHR( const SubpassBeginInfo & subpassBeginInfo, const SubpassEndInfo & subpassEndInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const &memoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const &bufferMemoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const &imageMemoryBarriers, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV* pGeneratedCommandsInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void preprocessGeneratedCommandsNV( const GeneratedCommandsInfoNV & generatedCommandsInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy<const T> const &values, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet* pDescriptorWrites, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const &descriptorWrites, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, const void* pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, const void* pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void resetEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void resetEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageResolve* pRegions, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> const &regions, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2KHR* pResolveImageInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void resolveImage2KHR( const ResolveImageInfo2KHR & resolveImageInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setBlendConstants( const float blendConstants[4], Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setBlendConstants( const float blendConstants[4], Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setCheckpointNV( const void* pCheckpointMarker, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setCheckpointNV( const void* pCheckpointMarker, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, ArrayProxy<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const &customSampleOrders, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setDeviceMask( uint32_t deviceMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setDeviceMask( uint32_t deviceMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setDeviceMaskKHR( uint32_t deviceMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setDeviceMaskKHR( uint32_t deviceMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const &discardRectangles, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setExclusiveScissorNV( uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setExclusiveScissorNV( uint32_t firstExclusiveScissor, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const &exclusiveScissors, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setLineWidth( float lineWidth, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setLineWidth( float lineWidth, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL* pMarkerInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type setPerformanceMarkerINTEL( const PerformanceMarkerInfoINTEL & markerInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL* pOverrideInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type setPerformanceOverrideINTEL( const PerformanceOverrideInfoINTEL & overrideInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL* pMarkerInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type setPerformanceStreamMarkerINTEL( const PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT* pSampleLocationsInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setSampleLocationsEXT( const SampleLocationsInfoEXT & sampleLocationsInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setScissor( uint32_t firstScissor, uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pScissors, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setScissor( uint32_t firstScissor, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const &scissors, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setScissorWithCountEXT( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pScissors, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setScissorWithCountEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const &scissors, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, VULKAN_HPP_NAMESPACE::StencilOp failOp, VULKAN_HPP_NAMESPACE::StencilOp passOp, VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, VULKAN_HPP_NAMESPACE::CompareOp compareOp, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, VULKAN_HPP_NAMESPACE::StencilOp failOp, VULKAN_HPP_NAMESPACE::StencilOp passOp, VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, VULKAN_HPP_NAMESPACE::CompareOp compareOp, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setViewport( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport* pViewports, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setViewport( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const &viewports, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setViewportShadingRatePaletteNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setViewportShadingRatePaletteNV( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const &shadingRatePalettes, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setViewportWScalingNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setViewportWScalingNV( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const &viewportWScalings, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setViewportWithCountEXT( uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport* pViewports, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setViewportWithCountEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const &viewports, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pCallableShaderBindingTable, VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void traceRaysIndirectKHR( const StridedBufferRegionKHR & raygenShaderBindingTable, const StridedBufferRegionKHR & missShaderBindingTable, const StridedBufferRegionKHR & hitShaderBindingTable, const StridedBufferRegionKHR & callableShaderBindingTable, VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pCallableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void traceRaysKHR( const StridedBufferRegionKHR & raygenShaderBindingTable, const StridedBufferRegionKHR & missShaderBindingTable, const StridedBufferRegionKHR & hitShaderBindingTable, const StridedBufferRegionKHR & callableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize dataSize, const void* pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, ArrayProxy<const T> const &data, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void waitEvents( uint32_t eventCount, const VULKAN_HPP_NAMESPACE::Event* pEvents, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void waitEvents( ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const &events, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const &memoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const &bufferMemoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const &imageMemoryBarriers, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void writeAccelerationStructuresPropertiesKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const &accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void writeAccelerationStructuresPropertiesNV( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const &accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result end(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type end(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ Result reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ typename ResultValueType<void>::type reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandBuffer() const VULKAN_HPP_NOEXCEPT
{
- return !operator==( rhs );
+ return m_commandBuffer;
}
- public:
- VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode = {};
- VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters = {};
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_commandBuffer != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_commandBuffer == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkCommandBuffer m_commandBuffer;
};
- static_assert( sizeof( DisplayModePropertiesKHR ) == sizeof( VkDisplayModePropertiesKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<DisplayModePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( VULKAN_HPP_NAMESPACE::CommandBuffer ) == sizeof( VkCommandBuffer ), "handle and wrapper have different size!" );
- struct DisplayModeProperties2KHR
+ template <>
+ struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eCommandBuffer>
+ {
+ using type = VULKAN_HPP_NAMESPACE::CommandBuffer;
+ };
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eCommandBuffer>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::CommandBuffer;
+ };
+
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandBuffer>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::CommandBuffer;
+ };
+
+
+ template <>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::CommandBuffer>
{
- DisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties_ = {} ) VULKAN_HPP_NOEXCEPT
- : displayModeProperties( displayModeProperties_ )
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+ };
+
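For orientation, a minimal usage sketch of the CommandBuffer declarations above. This is not part of the generated header: the raw handle, buffer, stage masks and access masks are placeholders, and the default `vk` namespace plus the statically dispatched default dispatcher are assumed.

#include <vulkan/vulkan.hpp>

// Sketch only: wraps an existing C handle and records one of the commands declared
// above through the enhanced-mode (ArrayProxy) overload.
void recordBarrierExample( VkCommandBuffer rawHandle, vk::Buffer buffer )
{
  vk::CommandBuffer cmd( rawHandle );   // VULKAN_HPP_TYPESAFE_EXPLICIT wrapping constructor
  static_assert( sizeof( vk::CommandBuffer ) == sizeof( VkCommandBuffer ),
                 "wrapper adds no storage over the C handle" );
  if ( !cmd )                           // operator! tests for VK_NULL_HANDLE
    return;

  // Placeholder barrier: transfer write -> fragment-shader read on the whole buffer.
  vk::BufferMemoryBarrier barrier( vk::AccessFlagBits::eTransferWrite,
                                   vk::AccessFlagBits::eShaderRead,
                                   VK_QUEUE_FAMILY_IGNORED, VK_QUEUE_FAMILY_IGNORED,
                                   buffer, 0, VK_WHOLE_SIZE );
  // Enhanced-mode overload: the element counts are taken from the ArrayProxy arguments.
  cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTransfer,
                       vk::PipelineStageFlagBits::eFragmentShader,
                       {}, nullptr, barrier, nullptr );
}
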
+ struct MemoryAllocateInfo
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryAllocateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR MemoryAllocateInfo(VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {}, uint32_t memoryTypeIndex_ = {}) VULKAN_HPP_NOEXCEPT
+ : allocationSize( allocationSize_ ), memoryTypeIndex( memoryTypeIndex_ )
{}
- VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR & operator=( VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR MemoryAllocateInfo( MemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ MemoryAllocateInfo( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR ) - offsetof( DisplayModeProperties2KHR, pNext ) );
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ MemoryAllocateInfo & operator=( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const *>( &rhs );
return *this;
}
- DisplayModeProperties2KHR( VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ MemoryAllocateInfo & operator=( MemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( MemoryAllocateInfo ) );
+ return *this;
}
- DisplayModeProperties2KHR& operator=( VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ MemoryAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR const *>(&rhs);
+ pNext = pNext_;
return *this;
}
- operator VkDisplayModeProperties2KHR const&() const VULKAN_HPP_NOEXCEPT
+ MemoryAllocateInfo & setAllocationSize( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkDisplayModeProperties2KHR*>( this );
+ allocationSize = allocationSize_;
+ return *this;
}
- operator VkDisplayModeProperties2KHR &() VULKAN_HPP_NOEXCEPT
+ MemoryAllocateInfo & setMemoryTypeIndex( uint32_t memoryTypeIndex_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkDisplayModeProperties2KHR*>( this );
+ memoryTypeIndex = memoryTypeIndex_;
+ return *this;
}
- bool operator==( DisplayModeProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+ operator VkMemoryAllocateInfo const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkMemoryAllocateInfo*>( this );
+ }
+
+ operator VkMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkMemoryAllocateInfo*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( MemoryAllocateInfo const& ) const = default;
+#else
+ bool operator==( MemoryAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( displayModeProperties == rhs.displayModeProperties );
+ && ( allocationSize == rhs.allocationSize )
+ && ( memoryTypeIndex == rhs.memoryTypeIndex );
}
- bool operator!=( DisplayModeProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( MemoryAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayModeProperties2KHR;
- void* pNext = {};
- VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties = {};
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryAllocateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize allocationSize = {};
+ uint32_t memoryTypeIndex = {};
+
};
- static_assert( sizeof( DisplayModeProperties2KHR ) == sizeof( VkDisplayModeProperties2KHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<DisplayModeProperties2KHR>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( MemoryAllocateInfo ) == sizeof( VkMemoryAllocateInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<MemoryAllocateInfo>::value, "struct wrapper is not a standard layout!" );
- struct DisplayNativeHdrSurfaceCapabilitiesAMD
+ template <>
+ struct CppType<StructureType, StructureType::eMemoryAllocateInfo>
+ {
+ using Type = MemoryAllocateInfo;
+ };
+
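For orientation, a minimal sketch of the setter-chaining pattern the generated structs such as MemoryAllocateInfo expose. Not part of the header; the allocation size and memory-type index are placeholder values, and the default `vk` namespace is assumed.

#include <vulkan/vulkan.hpp>

// Sketch only: builds a MemoryAllocateInfo with the fluent setters declared above and
// hands it back as the equivalent C struct via the implicit conversion operator.
VkMemoryAllocateInfo makeAllocateInfoExample()
{
  vk::MemoryAllocateInfo info;          // sType is pre-set to eMemoryAllocateInfo
  info.setAllocationSize( 65536 )       // size in bytes (placeholder)
      .setMemoryTypeIndex( 0 );         // index into the device's memory types (placeholder)
  return info;                          // operator VkMemoryAllocateInfo const&() converts
}
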
+ class PipelineCache
{
- DisplayNativeHdrSurfaceCapabilitiesAMD( VULKAN_HPP_NAMESPACE::Bool32 localDimmingSupport_ = {} ) VULKAN_HPP_NOEXCEPT
- : localDimmingSupport( localDimmingSupport_ )
+ public:
+ using CType = VkPipelineCache;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipelineCache;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineCache;
+
+ public:
+ VULKAN_HPP_CONSTEXPR PipelineCache() VULKAN_HPP_NOEXCEPT
+ : m_pipelineCache(VK_NULL_HANDLE)
{}
- VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD & operator=( VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR PipelineCache( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_pipelineCache(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT PipelineCache( VkPipelineCache pipelineCache ) VULKAN_HPP_NOEXCEPT
+ : m_pipelineCache( pipelineCache )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ PipelineCache & operator=(VkPipelineCache pipelineCache) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD ) - offsetof( DisplayNativeHdrSurfaceCapabilitiesAMD, pNext ) );
+ m_pipelineCache = pipelineCache;
return *this;
}
+#endif
- DisplayNativeHdrSurfaceCapabilitiesAMD( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineCache & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ m_pipelineCache = VK_NULL_HANDLE;
+ return *this;
}
- DisplayNativeHdrSurfaceCapabilitiesAMD& operator=( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PipelineCache const& ) const = default;
+#else
+ bool operator==( PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD const *>(&rhs);
- return *this;
+ return m_pipelineCache == rhs.m_pipelineCache;
}
- operator VkDisplayNativeHdrSurfaceCapabilitiesAMD const&() const VULKAN_HPP_NOEXCEPT
+ bool operator!=(PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkDisplayNativeHdrSurfaceCapabilitiesAMD*>( this );
+ return m_pipelineCache != rhs.m_pipelineCache;
}
- operator VkDisplayNativeHdrSurfaceCapabilitiesAMD &() VULKAN_HPP_NOEXCEPT
+ bool operator<(PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkDisplayNativeHdrSurfaceCapabilitiesAMD*>( this );
+ return m_pipelineCache < rhs.m_pipelineCache;
}
+#endif
- bool operator==( DisplayNativeHdrSurfaceCapabilitiesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineCache() const VULKAN_HPP_NOEXCEPT
{
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( localDimmingSupport == rhs.localDimmingSupport );
+ return m_pipelineCache;
}
- bool operator!=( DisplayNativeHdrSurfaceCapabilitiesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
{
- return !operator==( rhs );
+ return m_pipelineCache != VK_NULL_HANDLE;
}
- public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD;
- void* pNext = {};
- VULKAN_HPP_NAMESPACE::Bool32 localDimmingSupport = {};
+ bool operator!() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_pipelineCache == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkPipelineCache m_pipelineCache;
};
- static_assert( sizeof( DisplayNativeHdrSurfaceCapabilitiesAMD ) == sizeof( VkDisplayNativeHdrSurfaceCapabilitiesAMD ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<DisplayNativeHdrSurfaceCapabilitiesAMD>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( VULKAN_HPP_NAMESPACE::PipelineCache ) == sizeof( VkPipelineCache ), "handle and wrapper have different size!" );
- struct DisplayPlaneCapabilitiesKHR
+ template <>
+ struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::ePipelineCache>
+ {
+ using type = VULKAN_HPP_NAMESPACE::PipelineCache;
+ };
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::ePipelineCache>
{
- DisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR supportedAlpha_ = {},
- VULKAN_HPP_NAMESPACE::Offset2D minSrcPosition_ = {},
- VULKAN_HPP_NAMESPACE::Offset2D maxSrcPosition_ = {},
- VULKAN_HPP_NAMESPACE::Extent2D minSrcExtent_ = {},
- VULKAN_HPP_NAMESPACE::Extent2D maxSrcExtent_ = {},
- VULKAN_HPP_NAMESPACE::Offset2D minDstPosition_ = {},
- VULKAN_HPP_NAMESPACE::Offset2D maxDstPosition_ = {},
- VULKAN_HPP_NAMESPACE::Extent2D minDstExtent_ = {},
- VULKAN_HPP_NAMESPACE::Extent2D maxDstExtent_ = {} ) VULKAN_HPP_NOEXCEPT
- : supportedAlpha( supportedAlpha_ )
- , minSrcPosition( minSrcPosition_ )
- , maxSrcPosition( maxSrcPosition_ )
- , minSrcExtent( minSrcExtent_ )
- , maxSrcExtent( maxSrcExtent_ )
- , minDstPosition( minDstPosition_ )
- , maxDstPosition( maxDstPosition_ )
- , minDstExtent( minDstExtent_ )
- , maxDstExtent( maxDstExtent_ )
+ using Type = VULKAN_HPP_NAMESPACE::PipelineCache;
+ };
+
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineCache>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::PipelineCache;
+ };
+
+
+ template <>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::PipelineCache>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+ };
+
+ struct EventCreateInfo
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eEventCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR EventCreateInfo(VULKAN_HPP_NAMESPACE::EventCreateFlags flags_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
{}
- DisplayPlaneCapabilitiesKHR( VkDisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR EventCreateInfo( EventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ EventCreateInfo( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- DisplayPlaneCapabilitiesKHR& operator=( VkDisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ EventCreateInfo & operator=( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::EventCreateInfo const *>( &rhs );
return *this;
}
- operator VkDisplayPlaneCapabilitiesKHR const&() const VULKAN_HPP_NOEXCEPT
+ EventCreateInfo & operator=( EventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkDisplayPlaneCapabilitiesKHR*>( this );
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( EventCreateInfo ) );
+ return *this;
}
- operator VkDisplayPlaneCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
+ EventCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkDisplayPlaneCapabilitiesKHR*>( this );
+ pNext = pNext_;
+ return *this;
}
- bool operator==( DisplayPlaneCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ EventCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::EventCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
- return ( supportedAlpha == rhs.supportedAlpha )
- && ( minSrcPosition == rhs.minSrcPosition )
- && ( maxSrcPosition == rhs.maxSrcPosition )
- && ( minSrcExtent == rhs.minSrcExtent )
- && ( maxSrcExtent == rhs.maxSrcExtent )
- && ( minDstPosition == rhs.minDstPosition )
- && ( maxDstPosition == rhs.maxDstPosition )
- && ( minDstExtent == rhs.minDstExtent )
- && ( maxDstExtent == rhs.maxDstExtent );
+ flags = flags_;
+ return *this;
}
- bool operator!=( DisplayPlaneCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+ operator VkEventCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkEventCreateInfo*>( this );
+ }
+
+ operator VkEventCreateInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkEventCreateInfo*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( EventCreateInfo const& ) const = default;
+#else
+ bool operator==( EventCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags );
+ }
+
+ bool operator!=( EventCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR supportedAlpha = {};
- VULKAN_HPP_NAMESPACE::Offset2D minSrcPosition = {};
- VULKAN_HPP_NAMESPACE::Offset2D maxSrcPosition = {};
- VULKAN_HPP_NAMESPACE::Extent2D minSrcExtent = {};
- VULKAN_HPP_NAMESPACE::Extent2D maxSrcExtent = {};
- VULKAN_HPP_NAMESPACE::Offset2D minDstPosition = {};
- VULKAN_HPP_NAMESPACE::Offset2D maxDstPosition = {};
- VULKAN_HPP_NAMESPACE::Extent2D minDstExtent = {};
- VULKAN_HPP_NAMESPACE::Extent2D maxDstExtent = {};
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eEventCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::EventCreateFlags flags = {};
+
};
- static_assert( sizeof( DisplayPlaneCapabilitiesKHR ) == sizeof( VkDisplayPlaneCapabilitiesKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<DisplayPlaneCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( EventCreateInfo ) == sizeof( VkEventCreateInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<EventCreateInfo>::value, "struct wrapper is not a standard layout!" );
- struct DisplayPlaneCapabilities2KHR
+ template <>
+ struct CppType<StructureType, StructureType::eEventCreateInfo>
+ {
+ using Type = EventCreateInfo;
+ };
+
+ struct FenceCreateInfo
{
- DisplayPlaneCapabilities2KHR( VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities_ = {} ) VULKAN_HPP_NOEXCEPT
- : capabilities( capabilities_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR FenceCreateInfo(VULKAN_HPP_NAMESPACE::FenceCreateFlags flags_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
{}
- VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR & operator=( VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR FenceCreateInfo( FenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ FenceCreateInfo( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR ) - offsetof( DisplayPlaneCapabilities2KHR, pNext ) );
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ FenceCreateInfo & operator=( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FenceCreateInfo const *>( &rhs );
return *this;
}
- DisplayPlaneCapabilities2KHR( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ FenceCreateInfo & operator=( FenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( FenceCreateInfo ) );
+ return *this;
}
- DisplayPlaneCapabilities2KHR& operator=( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ FenceCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR const *>(&rhs);
+ pNext = pNext_;
return *this;
}
- operator VkDisplayPlaneCapabilities2KHR const&() const VULKAN_HPP_NOEXCEPT
+ FenceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::FenceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkDisplayPlaneCapabilities2KHR*>( this );
+ flags = flags_;
+ return *this;
}
- operator VkDisplayPlaneCapabilities2KHR &() VULKAN_HPP_NOEXCEPT
+
+ operator VkFenceCreateInfo const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkDisplayPlaneCapabilities2KHR*>( this );
+ return *reinterpret_cast<const VkFenceCreateInfo*>( this );
}
- bool operator==( DisplayPlaneCapabilities2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ operator VkFenceCreateInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkFenceCreateInfo*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( FenceCreateInfo const& ) const = default;
+#else
+ bool operator==( FenceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( capabilities == rhs.capabilities );
+ && ( flags == rhs.flags );
}
- bool operator!=( DisplayPlaneCapabilities2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( FenceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneCapabilities2KHR;
- void* pNext = {};
- VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities = {};
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::FenceCreateFlags flags = {};
+
};
- static_assert( sizeof( DisplayPlaneCapabilities2KHR ) == sizeof( VkDisplayPlaneCapabilities2KHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<DisplayPlaneCapabilities2KHR>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( FenceCreateInfo ) == sizeof( VkFenceCreateInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<FenceCreateInfo>::value, "struct wrapper is not a standard layout!" );
- struct DisplayPlaneInfo2KHR
+ template <>
+ struct CppType<StructureType, StructureType::eFenceCreateInfo>
{
- VULKAN_HPP_CONSTEXPR DisplayPlaneInfo2KHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_ = {},
- uint32_t planeIndex_ = {} ) VULKAN_HPP_NOEXCEPT
- : mode( mode_ )
- , planeIndex( planeIndex_ )
+ using Type = FenceCreateInfo;
+ };
+
+ struct FramebufferCreateInfo
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR FramebufferCreateInfo(VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, uint32_t attachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageView* pAttachments_ = {}, uint32_t width_ = {}, uint32_t height_ = {}, uint32_t layers_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), renderPass( renderPass_ ), attachmentCount( attachmentCount_ ), pAttachments( pAttachments_ ), width( width_ ), height( height_ ), layers( layers_ )
{}
- VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR & operator=( VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR FramebufferCreateInfo( FramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ FramebufferCreateInfo( VkFramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR ) - offsetof( DisplayPlaneInfo2KHR, pNext ) );
- return *this;
+ *this = rhs;
}
- DisplayPlaneInfo2KHR( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ FramebufferCreateInfo( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_, VULKAN_HPP_NAMESPACE::RenderPass renderPass_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageView> const & attachments_, uint32_t width_ = {}, uint32_t height_ = {}, uint32_t layers_ = {} )
+ : flags( flags_ ), renderPass( renderPass_ ), attachmentCount( static_cast<uint32_t>( attachments_.size() ) ), pAttachments( attachments_.data() ), width( width_ ), height( height_ ), layers( layers_ )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ FramebufferCreateInfo & operator=( VkFramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const *>( &rhs );
+ return *this;
}
- DisplayPlaneInfo2KHR& operator=( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ FramebufferCreateInfo & operator=( FramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( FramebufferCreateInfo ) );
return *this;
}
- DisplayPlaneInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ FramebufferCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- DisplayPlaneInfo2KHR & setMode( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_ ) VULKAN_HPP_NOEXCEPT
+ FramebufferCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
- mode = mode_;
+ flags = flags_;
return *this;
}
- DisplayPlaneInfo2KHR & setPlaneIndex( uint32_t planeIndex_ ) VULKAN_HPP_NOEXCEPT
+ FramebufferCreateInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
{
- planeIndex = planeIndex_;
+ renderPass = renderPass_;
return *this;
}
- operator VkDisplayPlaneInfo2KHR const&() const VULKAN_HPP_NOEXCEPT
+ FramebufferCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkDisplayPlaneInfo2KHR*>( this );
+ attachmentCount = attachmentCount_;
+ return *this;
}
- operator VkDisplayPlaneInfo2KHR &() VULKAN_HPP_NOEXCEPT
+ FramebufferCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::ImageView* pAttachments_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkDisplayPlaneInfo2KHR*>( this );
+ pAttachments = pAttachments_;
+ return *this;
}
- bool operator==( DisplayPlaneInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ FramebufferCreateInfo & setAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageView> const & attachments_ ) VULKAN_HPP_NOEXCEPT
+ {
+ attachmentCount = static_cast<uint32_t>( attachments_.size() );
+ pAttachments = attachments_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+ FramebufferCreateInfo & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
+ {
+ width = width_;
+ return *this;
+ }
+
+ FramebufferCreateInfo & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
+ {
+ height = height_;
+ return *this;
+ }
+
+ FramebufferCreateInfo & setLayers( uint32_t layers_ ) VULKAN_HPP_NOEXCEPT
+ {
+ layers = layers_;
+ return *this;
+ }
+
+
+ operator VkFramebufferCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkFramebufferCreateInfo*>( this );
+ }
+
+ operator VkFramebufferCreateInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkFramebufferCreateInfo*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( FramebufferCreateInfo const& ) const = default;
+#else
+ bool operator==( FramebufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( mode == rhs.mode )
- && ( planeIndex == rhs.planeIndex );
+ && ( flags == rhs.flags )
+ && ( renderPass == rhs.renderPass )
+ && ( attachmentCount == rhs.attachmentCount )
+ && ( pAttachments == rhs.pAttachments )
+ && ( width == rhs.width )
+ && ( height == rhs.height )
+ && ( layers == rhs.layers );
}
- bool operator!=( DisplayPlaneInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( FramebufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneInfo2KHR;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferCreateInfo;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::DisplayModeKHR mode = {};
- uint32_t planeIndex = {};
+ VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags = {};
+ VULKAN_HPP_NAMESPACE::RenderPass renderPass = {};
+ uint32_t attachmentCount = {};
+ const VULKAN_HPP_NAMESPACE::ImageView* pAttachments = {};
+ uint32_t width = {};
+ uint32_t height = {};
+ uint32_t layers = {};
+
};
- static_assert( sizeof( DisplayPlaneInfo2KHR ) == sizeof( VkDisplayPlaneInfo2KHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<DisplayPlaneInfo2KHR>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( FramebufferCreateInfo ) == sizeof( VkFramebufferCreateInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<FramebufferCreateInfo>::value, "struct wrapper is not a standard layout!" );
- struct DisplayPlanePropertiesKHR
+ template <>
+ struct CppType<StructureType, StructureType::eFramebufferCreateInfo>
{
- DisplayPlanePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay_ = {},
- uint32_t currentStackIndex_ = {} ) VULKAN_HPP_NOEXCEPT
- : currentDisplay( currentDisplay_ )
- , currentStackIndex( currentStackIndex_ )
+ using Type = FramebufferCreateInfo;
+ };
+
+ struct VertexInputBindingDescription
+ {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR VertexInputBindingDescription(uint32_t binding_ = {}, uint32_t stride_ = {}, VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex) VULKAN_HPP_NOEXCEPT
+ : binding( binding_ ), stride( stride_ ), inputRate( inputRate_ )
{}
- DisplayPlanePropertiesKHR( VkDisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR VertexInputBindingDescription( VertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ VertexInputBindingDescription( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- DisplayPlanePropertiesKHR& operator=( VkDisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ VertexInputBindingDescription & operator=( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputBindingDescription const *>( &rhs );
return *this;
}
- operator VkDisplayPlanePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
+ VertexInputBindingDescription & operator=( VertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkDisplayPlanePropertiesKHR*>( this );
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( VertexInputBindingDescription ) );
+ return *this;
}
- operator VkDisplayPlanePropertiesKHR &() VULKAN_HPP_NOEXCEPT
+ VertexInputBindingDescription & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkDisplayPlanePropertiesKHR*>( this );
+ binding = binding_;
+ return *this;
}
- bool operator==( DisplayPlanePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ VertexInputBindingDescription & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT
{
- return ( currentDisplay == rhs.currentDisplay )
- && ( currentStackIndex == rhs.currentStackIndex );
+ stride = stride_;
+ return *this;
}
- bool operator!=( DisplayPlanePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ VertexInputBindingDescription & setInputRate( VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ ) VULKAN_HPP_NOEXCEPT
+ {
+ inputRate = inputRate_;
+ return *this;
+ }
+
+
+ operator VkVertexInputBindingDescription const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkVertexInputBindingDescription*>( this );
+ }
+
+ operator VkVertexInputBindingDescription &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkVertexInputBindingDescription*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( VertexInputBindingDescription const& ) const = default;
+#else
+ bool operator==( VertexInputBindingDescription const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( binding == rhs.binding )
+ && ( stride == rhs.stride )
+ && ( inputRate == rhs.inputRate );
+ }
+
+ bool operator!=( VertexInputBindingDescription const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay = {};
- uint32_t currentStackIndex = {};
+ uint32_t binding = {};
+ uint32_t stride = {};
+ VULKAN_HPP_NAMESPACE::VertexInputRate inputRate = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex;
+
};
- static_assert( sizeof( DisplayPlanePropertiesKHR ) == sizeof( VkDisplayPlanePropertiesKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<DisplayPlanePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( VertexInputBindingDescription ) == sizeof( VkVertexInputBindingDescription ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<VertexInputBindingDescription>::value, "struct wrapper is not a standard layout!" );
- struct DisplayPlaneProperties2KHR
+ struct VertexInputAttributeDescription
{
- DisplayPlaneProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties_ = {} ) VULKAN_HPP_NOEXCEPT
- : displayPlaneProperties( displayPlaneProperties_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription(uint32_t location_ = {}, uint32_t binding_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, uint32_t offset_ = {}) VULKAN_HPP_NOEXCEPT
+ : location( location_ ), binding( binding_ ), format( format_ ), offset( offset_ )
{}
- VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR & operator=( VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription( VertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ VertexInputAttributeDescription( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ VertexInputAttributeDescription & operator=( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR ) - offsetof( DisplayPlaneProperties2KHR, pNext ) );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription const *>( &rhs );
return *this;
}
- DisplayPlaneProperties2KHR( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ VertexInputAttributeDescription & operator=( VertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( VertexInputAttributeDescription ) );
+ return *this;
}
- DisplayPlaneProperties2KHR& operator=( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ VertexInputAttributeDescription & setLocation( uint32_t location_ ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR const *>(&rhs);
+ location = location_;
return *this;
}
- operator VkDisplayPlaneProperties2KHR const&() const VULKAN_HPP_NOEXCEPT
+ VertexInputAttributeDescription & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkDisplayPlaneProperties2KHR*>( this );
+ binding = binding_;
+ return *this;
}
- operator VkDisplayPlaneProperties2KHR &() VULKAN_HPP_NOEXCEPT
+ VertexInputAttributeDescription & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkDisplayPlaneProperties2KHR*>( this );
+ format = format_;
+ return *this;
}
- bool operator==( DisplayPlaneProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ VertexInputAttributeDescription & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
{
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( displayPlaneProperties == rhs.displayPlaneProperties );
+ offset = offset_;
+ return *this;
}
- bool operator!=( DisplayPlaneProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+ operator VkVertexInputAttributeDescription const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkVertexInputAttributeDescription*>( this );
+ }
+
+ operator VkVertexInputAttributeDescription &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkVertexInputAttributeDescription*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( VertexInputAttributeDescription const& ) const = default;
+#else
+ bool operator==( VertexInputAttributeDescription const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( location == rhs.location )
+ && ( binding == rhs.binding )
+ && ( format == rhs.format )
+ && ( offset == rhs.offset );
+ }
+
+ bool operator!=( VertexInputAttributeDescription const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneProperties2KHR;
- void* pNext = {};
- VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties = {};
+ uint32_t location = {};
+ uint32_t binding = {};
+ VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ uint32_t offset = {};
+
};
- static_assert( sizeof( DisplayPlaneProperties2KHR ) == sizeof( VkDisplayPlaneProperties2KHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<DisplayPlaneProperties2KHR>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( VertexInputAttributeDescription ) == sizeof( VkVertexInputAttributeDescription ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<VertexInputAttributeDescription>::value, "struct wrapper is not a standard layout!" );
- struct DisplayPowerInfoEXT
+ struct PipelineVertexInputStateCreateInfo
{
- VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT( VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ = VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff ) VULKAN_HPP_NOEXCEPT
- : powerState( powerState_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineVertexInputStateCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ = {}, uint32_t vertexBindingDescriptionCount_ = {}, const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription* pVertexBindingDescriptions_ = {}, uint32_t vertexAttributeDescriptionCount_ = {}, const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription* pVertexAttributeDescriptions_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), vertexBindingDescriptionCount( vertexBindingDescriptionCount_ ), pVertexBindingDescriptions( pVertexBindingDescriptions_ ), vertexAttributeDescriptionCount( vertexAttributeDescriptionCount_ ), pVertexAttributeDescriptions( pVertexAttributeDescriptions_ )
{}
- VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & operator=( VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo( PipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PipelineVertexInputStateCreateInfo( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT ) - offsetof( DisplayPowerInfoEXT, pNext ) );
- return *this;
+ *this = rhs;
}
- DisplayPowerInfoEXT( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ PipelineVertexInputStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription> const & vertexBindingDescriptions_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription> const & vertexAttributeDescriptions_ = {} )
+ : flags( flags_ ), vertexBindingDescriptionCount( static_cast<uint32_t>( vertexBindingDescriptions_.size() ) ), pVertexBindingDescriptions( vertexBindingDescriptions_.data() ), vertexAttributeDescriptionCount( static_cast<uint32_t>( vertexAttributeDescriptions_.size() ) ), pVertexAttributeDescriptions( vertexAttributeDescriptions_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PipelineVertexInputStateCreateInfo & operator=( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo const *>( &rhs );
+ return *this;
}
- DisplayPowerInfoEXT& operator=( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineVertexInputStateCreateInfo & operator=( PipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineVertexInputStateCreateInfo ) );
return *this;
}
- DisplayPowerInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ PipelineVertexInputStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- DisplayPowerInfoEXT & setPowerState( VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ ) VULKAN_HPP_NOEXCEPT
+ PipelineVertexInputStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
- powerState = powerState_;
+ flags = flags_;
return *this;
}
- operator VkDisplayPowerInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+ PipelineVertexInputStateCreateInfo & setVertexBindingDescriptionCount( uint32_t vertexBindingDescriptionCount_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkDisplayPowerInfoEXT*>( this );
+ vertexBindingDescriptionCount = vertexBindingDescriptionCount_;
+ return *this;
}
- operator VkDisplayPowerInfoEXT &() VULKAN_HPP_NOEXCEPT
+ PipelineVertexInputStateCreateInfo & setPVertexBindingDescriptions( const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription* pVertexBindingDescriptions_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkDisplayPowerInfoEXT*>( this );
+ pVertexBindingDescriptions = pVertexBindingDescriptions_;
+ return *this;
}
- bool operator==( DisplayPowerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ PipelineVertexInputStateCreateInfo & setVertexBindingDescriptions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription> const & vertexBindingDescriptions_ ) VULKAN_HPP_NOEXCEPT
+ {
+ vertexBindingDescriptionCount = static_cast<uint32_t>( vertexBindingDescriptions_.size() );
+ pVertexBindingDescriptions = vertexBindingDescriptions_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+ PipelineVertexInputStateCreateInfo & setVertexAttributeDescriptionCount( uint32_t vertexAttributeDescriptionCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ vertexAttributeDescriptionCount = vertexAttributeDescriptionCount_;
+ return *this;
+ }
+
+ PipelineVertexInputStateCreateInfo & setPVertexAttributeDescriptions( const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription* pVertexAttributeDescriptions_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pVertexAttributeDescriptions = pVertexAttributeDescriptions_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ PipelineVertexInputStateCreateInfo & setVertexAttributeDescriptions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription> const & vertexAttributeDescriptions_ ) VULKAN_HPP_NOEXCEPT
+ {
+ vertexAttributeDescriptionCount = static_cast<uint32_t>( vertexAttributeDescriptions_.size() );
+ pVertexAttributeDescriptions = vertexAttributeDescriptions_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+ operator VkPipelineVertexInputStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPipelineVertexInputStateCreateInfo*>( this );
+ }
+
+ operator VkPipelineVertexInputStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPipelineVertexInputStateCreateInfo*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PipelineVertexInputStateCreateInfo const& ) const = default;
+#else
+ bool operator==( PipelineVertexInputStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( powerState == rhs.powerState );
+ && ( flags == rhs.flags )
+ && ( vertexBindingDescriptionCount == rhs.vertexBindingDescriptionCount )
+ && ( pVertexBindingDescriptions == rhs.pVertexBindingDescriptions )
+ && ( vertexAttributeDescriptionCount == rhs.vertexAttributeDescriptionCount )
+ && ( pVertexAttributeDescriptions == rhs.pVertexAttributeDescriptions );
}
- bool operator!=( DisplayPowerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PipelineVertexInputStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPowerInfoEXT;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineVertexInputStateCreateInfo;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState = VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff;
+ VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags = {};
+ uint32_t vertexBindingDescriptionCount = {};
+ const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription* pVertexBindingDescriptions = {};
+ uint32_t vertexAttributeDescriptionCount = {};
+ const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription* pVertexAttributeDescriptions = {};
+
};
- static_assert( sizeof( DisplayPowerInfoEXT ) == sizeof( VkDisplayPowerInfoEXT ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<DisplayPowerInfoEXT>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( PipelineVertexInputStateCreateInfo ) == sizeof( VkPipelineVertexInputStateCreateInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PipelineVertexInputStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
- struct DisplayPresentInfoKHR
+ template <>
+ struct CppType<StructureType, StructureType::ePipelineVertexInputStateCreateInfo>
{
- VULKAN_HPP_CONSTEXPR DisplayPresentInfoKHR( VULKAN_HPP_NAMESPACE::Rect2D srcRect_ = {},
- VULKAN_HPP_NAMESPACE::Rect2D dstRect_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 persistent_ = {} ) VULKAN_HPP_NOEXCEPT
- : srcRect( srcRect_ )
- , dstRect( dstRect_ )
- , persistent( persistent_ )
+ using Type = PipelineVertexInputStateCreateInfo;
+ };
+
+ struct PipelineInputAssemblyStateCreateInfo
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineInputAssemblyStateCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PipelineInputAssemblyStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ = VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList, VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), topology( topology_ ), primitiveRestartEnable( primitiveRestartEnable_ )
{}
- VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR & operator=( VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR PipelineInputAssemblyStateCreateInfo( PipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PipelineInputAssemblyStateCreateInfo( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR ) - offsetof( DisplayPresentInfoKHR, pNext ) );
- return *this;
+ *this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- DisplayPresentInfoKHR( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineInputAssemblyStateCreateInfo & operator=( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo const *>( &rhs );
+ return *this;
}
- DisplayPresentInfoKHR& operator=( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineInputAssemblyStateCreateInfo & operator=( PipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineInputAssemblyStateCreateInfo ) );
return *this;
}
- DisplayPresentInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ PipelineInputAssemblyStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- DisplayPresentInfoKHR & setSrcRect( VULKAN_HPP_NAMESPACE::Rect2D srcRect_ ) VULKAN_HPP_NOEXCEPT
+ PipelineInputAssemblyStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
- srcRect = srcRect_;
+ flags = flags_;
return *this;
}
- DisplayPresentInfoKHR & setDstRect( VULKAN_HPP_NAMESPACE::Rect2D dstRect_ ) VULKAN_HPP_NOEXCEPT
+ PipelineInputAssemblyStateCreateInfo & setTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ ) VULKAN_HPP_NOEXCEPT
{
- dstRect = dstRect_;
+ topology = topology_;
return *this;
}
- DisplayPresentInfoKHR & setPersistent( VULKAN_HPP_NAMESPACE::Bool32 persistent_ ) VULKAN_HPP_NOEXCEPT
+ PipelineInputAssemblyStateCreateInfo & setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ ) VULKAN_HPP_NOEXCEPT
{
- persistent = persistent_;
+ primitiveRestartEnable = primitiveRestartEnable_;
return *this;
}
- operator VkDisplayPresentInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkPipelineInputAssemblyStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkDisplayPresentInfoKHR*>( this );
+ return *reinterpret_cast<const VkPipelineInputAssemblyStateCreateInfo*>( this );
}
- operator VkDisplayPresentInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkPipelineInputAssemblyStateCreateInfo &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkDisplayPresentInfoKHR*>( this );
+ return *reinterpret_cast<VkPipelineInputAssemblyStateCreateInfo*>( this );
}
- bool operator==( DisplayPresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PipelineInputAssemblyStateCreateInfo const& ) const = default;
+#else
+ bool operator==( PipelineInputAssemblyStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( srcRect == rhs.srcRect )
- && ( dstRect == rhs.dstRect )
- && ( persistent == rhs.persistent );
+ && ( flags == rhs.flags )
+ && ( topology == rhs.topology )
+ && ( primitiveRestartEnable == rhs.primitiveRestartEnable );
}
- bool operator!=( DisplayPresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PipelineInputAssemblyStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPresentInfoKHR;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineInputAssemblyStateCreateInfo;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::Rect2D srcRect = {};
- VULKAN_HPP_NAMESPACE::Rect2D dstRect = {};
- VULKAN_HPP_NAMESPACE::Bool32 persistent = {};
+ VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags = {};
+ VULKAN_HPP_NAMESPACE::PrimitiveTopology topology = VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList;
+ VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable = {};
+
};
- static_assert( sizeof( DisplayPresentInfoKHR ) == sizeof( VkDisplayPresentInfoKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<DisplayPresentInfoKHR>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( PipelineInputAssemblyStateCreateInfo ) == sizeof( VkPipelineInputAssemblyStateCreateInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PipelineInputAssemblyStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
- struct DisplayPropertiesKHR
+ template <>
+ struct CppType<StructureType, StructureType::ePipelineInputAssemblyStateCreateInfo>
{
- DisplayPropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display_ = {},
- const char* displayName_ = {},
- VULKAN_HPP_NAMESPACE::Extent2D physicalDimensions_ = {},
- VULKAN_HPP_NAMESPACE::Extent2D physicalResolution_ = {},
- VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 planeReorderPossible_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 persistentContent_ = {} ) VULKAN_HPP_NOEXCEPT
- : display( display_ )
- , displayName( displayName_ )
- , physicalDimensions( physicalDimensions_ )
- , physicalResolution( physicalResolution_ )
- , supportedTransforms( supportedTransforms_ )
- , planeReorderPossible( planeReorderPossible_ )
- , persistentContent( persistentContent_ )
+ using Type = PipelineInputAssemblyStateCreateInfo;
+ };
+
+ struct PipelineTessellationStateCreateInfo
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineTessellationStateCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PipelineTessellationStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ = {}, uint32_t patchControlPoints_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), patchControlPoints( patchControlPoints_ )
{}
- DisplayPropertiesKHR( VkDisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR PipelineTessellationStateCreateInfo( PipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PipelineTessellationStateCreateInfo( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- DisplayPropertiesKHR& operator=( VkDisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineTessellationStateCreateInfo & operator=( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo const *>( &rhs );
return *this;
}
- operator VkDisplayPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
+ PipelineTessellationStateCreateInfo & operator=( PipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkDisplayPropertiesKHR*>( this );
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineTessellationStateCreateInfo ) );
+ return *this;
}
- operator VkDisplayPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+ PipelineTessellationStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkDisplayPropertiesKHR*>( this );
+ pNext = pNext_;
+ return *this;
}
- bool operator==( DisplayPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ PipelineTessellationStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
- return ( display == rhs.display )
- && ( displayName == rhs.displayName )
- && ( physicalDimensions == rhs.physicalDimensions )
- && ( physicalResolution == rhs.physicalResolution )
- && ( supportedTransforms == rhs.supportedTransforms )
- && ( planeReorderPossible == rhs.planeReorderPossible )
- && ( persistentContent == rhs.persistentContent );
+ flags = flags_;
+ return *this;
}
- bool operator!=( DisplayPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ PipelineTessellationStateCreateInfo & setPatchControlPoints( uint32_t patchControlPoints_ ) VULKAN_HPP_NOEXCEPT
+ {
+ patchControlPoints = patchControlPoints_;
+ return *this;
+ }
+
+
+ operator VkPipelineTessellationStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPipelineTessellationStateCreateInfo*>( this );
+ }
+
+ operator VkPipelineTessellationStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPipelineTessellationStateCreateInfo*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PipelineTessellationStateCreateInfo const& ) const = default;
+#else
+ bool operator==( PipelineTessellationStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( patchControlPoints == rhs.patchControlPoints );
+ }
+
+ bool operator!=( PipelineTessellationStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- VULKAN_HPP_NAMESPACE::DisplayKHR display = {};
- const char* displayName = {};
- VULKAN_HPP_NAMESPACE::Extent2D physicalDimensions = {};
- VULKAN_HPP_NAMESPACE::Extent2D physicalResolution = {};
- VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {};
- VULKAN_HPP_NAMESPACE::Bool32 planeReorderPossible = {};
- VULKAN_HPP_NAMESPACE::Bool32 persistentContent = {};
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineTessellationStateCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags = {};
+ uint32_t patchControlPoints = {};
+
};
- static_assert( sizeof( DisplayPropertiesKHR ) == sizeof( VkDisplayPropertiesKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<DisplayPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( PipelineTessellationStateCreateInfo ) == sizeof( VkPipelineTessellationStateCreateInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PipelineTessellationStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
- struct DisplayProperties2KHR
+ template <>
+ struct CppType<StructureType, StructureType::ePipelineTessellationStateCreateInfo>
{
- DisplayProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties_ = {} ) VULKAN_HPP_NOEXCEPT
- : displayProperties( displayProperties_ )
+ using Type = PipelineTessellationStateCreateInfo;
+ };
+
+ struct PipelineViewportStateCreateInfo
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportStateCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PipelineViewportStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_ = {}, uint32_t viewportCount_ = {}, const VULKAN_HPP_NAMESPACE::Viewport* pViewports_ = {}, uint32_t scissorCount_ = {}, const VULKAN_HPP_NAMESPACE::Rect2D* pScissors_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), viewportCount( viewportCount_ ), pViewports( pViewports_ ), scissorCount( scissorCount_ ), pScissors( pScissors_ )
{}
- VULKAN_HPP_NAMESPACE::DisplayProperties2KHR & operator=( VULKAN_HPP_NAMESPACE::DisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR PipelineViewportStateCreateInfo( PipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PipelineViewportStateCreateInfo( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DisplayProperties2KHR ) - offsetof( DisplayProperties2KHR, pNext ) );
+ *this = rhs;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ PipelineViewportStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors_ = {} )
+ : flags( flags_ ), viewportCount( static_cast<uint32_t>( viewports_.size() ) ), pViewports( viewports_.data() ), scissorCount( static_cast<uint32_t>( scissors_.size() ) ), pScissors( scissors_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PipelineViewportStateCreateInfo & operator=( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo const *>( &rhs );
return *this;
}
- DisplayProperties2KHR( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineViewportStateCreateInfo & operator=( PipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineViewportStateCreateInfo ) );
+ return *this;
}
- DisplayProperties2KHR& operator=( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineViewportStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR const *>(&rhs);
+ pNext = pNext_;
return *this;
}
- operator VkDisplayProperties2KHR const&() const VULKAN_HPP_NOEXCEPT
+ PipelineViewportStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkDisplayProperties2KHR*>( this );
+ flags = flags_;
+ return *this;
}
- operator VkDisplayProperties2KHR &() VULKAN_HPP_NOEXCEPT
+ PipelineViewportStateCreateInfo & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkDisplayProperties2KHR*>( this );
+ viewportCount = viewportCount_;
+ return *this;
}
- bool operator==( DisplayProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ PipelineViewportStateCreateInfo & setPViewports( const VULKAN_HPP_NAMESPACE::Viewport* pViewports_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pViewports = pViewports_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ PipelineViewportStateCreateInfo & setViewports( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports_ ) VULKAN_HPP_NOEXCEPT
+ {
+ viewportCount = static_cast<uint32_t>( viewports_.size() );
+ pViewports = viewports_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+ PipelineViewportStateCreateInfo & setScissorCount( uint32_t scissorCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ scissorCount = scissorCount_;
+ return *this;
+ }
+
+ PipelineViewportStateCreateInfo & setPScissors( const VULKAN_HPP_NAMESPACE::Rect2D* pScissors_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pScissors = pScissors_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ PipelineViewportStateCreateInfo & setScissors( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors_ ) VULKAN_HPP_NOEXCEPT
+ {
+ scissorCount = static_cast<uint32_t>( scissors_.size() );
+ pScissors = scissors_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+ operator VkPipelineViewportStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPipelineViewportStateCreateInfo*>( this );
+ }
+
+ operator VkPipelineViewportStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPipelineViewportStateCreateInfo*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PipelineViewportStateCreateInfo const& ) const = default;
+#else
+ bool operator==( PipelineViewportStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( displayProperties == rhs.displayProperties );
+ && ( flags == rhs.flags )
+ && ( viewportCount == rhs.viewportCount )
+ && ( pViewports == rhs.pViewports )
+ && ( scissorCount == rhs.scissorCount )
+ && ( pScissors == rhs.pScissors );
}
- bool operator!=( DisplayProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PipelineViewportStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayProperties2KHR;
- void* pNext = {};
- VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties = {};
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportStateCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags = {};
+ uint32_t viewportCount = {};
+ const VULKAN_HPP_NAMESPACE::Viewport* pViewports = {};
+ uint32_t scissorCount = {};
+ const VULKAN_HPP_NAMESPACE::Rect2D* pScissors = {};
+
};
- static_assert( sizeof( DisplayProperties2KHR ) == sizeof( VkDisplayProperties2KHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<DisplayProperties2KHR>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( PipelineViewportStateCreateInfo ) == sizeof( VkPipelineViewportStateCreateInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PipelineViewportStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
- struct DisplaySurfaceCreateInfoKHR
+ template <>
+ struct CppType<StructureType, StructureType::ePipelineViewportStateCreateInfo>
{
- VULKAN_HPP_CONSTEXPR DisplaySurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_ = {},
- VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ = {},
- uint32_t planeIndex_ = {},
- uint32_t planeStackIndex_ = {},
- VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity,
- float globalAlpha_ = {},
- VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ = VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque,
- VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , displayMode( displayMode_ )
- , planeIndex( planeIndex_ )
- , planeStackIndex( planeStackIndex_ )
- , transform( transform_ )
- , globalAlpha( globalAlpha_ )
- , alphaMode( alphaMode_ )
- , imageExtent( imageExtent_ )
+ using Type = PipelineViewportStateCreateInfo;
+ };
+
+ struct PipelineRasterizationStateCreateInfo
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationStateCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable_ = {}, VULKAN_HPP_NAMESPACE::PolygonMode polygonMode_ = VULKAN_HPP_NAMESPACE::PolygonMode::eFill, VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_ = {}, VULKAN_HPP_NAMESPACE::FrontFace frontFace_ = VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise, VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable_ = {}, float depthBiasConstantFactor_ = {}, float depthBiasClamp_ = {}, float depthBiasSlopeFactor_ = {}, float lineWidth_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), depthClampEnable( depthClampEnable_ ), rasterizerDiscardEnable( rasterizerDiscardEnable_ ), polygonMode( polygonMode_ ), cullMode( cullMode_ ), frontFace( frontFace_ ), depthBiasEnable( depthBiasEnable_ ), depthBiasConstantFactor( depthBiasConstantFactor_ ), depthBiasClamp( depthBiasClamp_ ), depthBiasSlopeFactor( depthBiasSlopeFactor_ ), lineWidth( lineWidth_ )
{}
- VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & operator=( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo( PipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PipelineRasterizationStateCreateInfo( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR ) - offsetof( DisplaySurfaceCreateInfoKHR, pNext ) );
- return *this;
+ *this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- DisplaySurfaceCreateInfoKHR( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineRasterizationStateCreateInfo & operator=( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo const *>( &rhs );
+ return *this;
}
- DisplaySurfaceCreateInfoKHR& operator=( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineRasterizationStateCreateInfo & operator=( PipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineRasterizationStateCreateInfo ) );
return *this;
}
- DisplaySurfaceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ PipelineRasterizationStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- DisplaySurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+ PipelineRasterizationStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- DisplaySurfaceCreateInfoKHR & setDisplayMode( VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ ) VULKAN_HPP_NOEXCEPT
+ PipelineRasterizationStateCreateInfo & setDepthClampEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable_ ) VULKAN_HPP_NOEXCEPT
{
- displayMode = displayMode_;
+ depthClampEnable = depthClampEnable_;
return *this;
}
- DisplaySurfaceCreateInfoKHR & setPlaneIndex( uint32_t planeIndex_ ) VULKAN_HPP_NOEXCEPT
+ PipelineRasterizationStateCreateInfo & setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable_ ) VULKAN_HPP_NOEXCEPT
{
- planeIndex = planeIndex_;
+ rasterizerDiscardEnable = rasterizerDiscardEnable_;
return *this;
}
- DisplaySurfaceCreateInfoKHR & setPlaneStackIndex( uint32_t planeStackIndex_ ) VULKAN_HPP_NOEXCEPT
+ PipelineRasterizationStateCreateInfo & setPolygonMode( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode_ ) VULKAN_HPP_NOEXCEPT
{
- planeStackIndex = planeStackIndex_;
+ polygonMode = polygonMode_;
return *this;
}
- DisplaySurfaceCreateInfoKHR & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT
+ PipelineRasterizationStateCreateInfo & setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_ ) VULKAN_HPP_NOEXCEPT
{
- transform = transform_;
+ cullMode = cullMode_;
return *this;
}
- DisplaySurfaceCreateInfoKHR & setGlobalAlpha( float globalAlpha_ ) VULKAN_HPP_NOEXCEPT
+ PipelineRasterizationStateCreateInfo & setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace_ ) VULKAN_HPP_NOEXCEPT
{
- globalAlpha = globalAlpha_;
+ frontFace = frontFace_;
return *this;
}
- DisplaySurfaceCreateInfoKHR & setAlphaMode( VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ ) VULKAN_HPP_NOEXCEPT
+ PipelineRasterizationStateCreateInfo & setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable_ ) VULKAN_HPP_NOEXCEPT
{
- alphaMode = alphaMode_;
+ depthBiasEnable = depthBiasEnable_;
return *this;
}
- DisplaySurfaceCreateInfoKHR & setImageExtent( VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ ) VULKAN_HPP_NOEXCEPT
+ PipelineRasterizationStateCreateInfo & setDepthBiasConstantFactor( float depthBiasConstantFactor_ ) VULKAN_HPP_NOEXCEPT
{
- imageExtent = imageExtent_;
+ depthBiasConstantFactor = depthBiasConstantFactor_;
return *this;
}
- operator VkDisplaySurfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+ PipelineRasterizationStateCreateInfo & setDepthBiasClamp( float depthBiasClamp_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR*>( this );
+ depthBiasClamp = depthBiasClamp_;
+ return *this;
}
- operator VkDisplaySurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+ PipelineRasterizationStateCreateInfo & setDepthBiasSlopeFactor( float depthBiasSlopeFactor_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkDisplaySurfaceCreateInfoKHR*>( this );
+ depthBiasSlopeFactor = depthBiasSlopeFactor_;
+ return *this;
}
- bool operator==( DisplaySurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ PipelineRasterizationStateCreateInfo & setLineWidth( float lineWidth_ ) VULKAN_HPP_NOEXCEPT
+ {
+ lineWidth = lineWidth_;
+ return *this;
+ }
+
+
+ operator VkPipelineRasterizationStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPipelineRasterizationStateCreateInfo*>( this );
+ }
+
+ operator VkPipelineRasterizationStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPipelineRasterizationStateCreateInfo*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PipelineRasterizationStateCreateInfo const& ) const = default;
+#else
+ bool operator==( PipelineRasterizationStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
- && ( displayMode == rhs.displayMode )
- && ( planeIndex == rhs.planeIndex )
- && ( planeStackIndex == rhs.planeStackIndex )
- && ( transform == rhs.transform )
- && ( globalAlpha == rhs.globalAlpha )
- && ( alphaMode == rhs.alphaMode )
- && ( imageExtent == rhs.imageExtent );
+ && ( depthClampEnable == rhs.depthClampEnable )
+ && ( rasterizerDiscardEnable == rhs.rasterizerDiscardEnable )
+ && ( polygonMode == rhs.polygonMode )
+ && ( cullMode == rhs.cullMode )
+ && ( frontFace == rhs.frontFace )
+ && ( depthBiasEnable == rhs.depthBiasEnable )
+ && ( depthBiasConstantFactor == rhs.depthBiasConstantFactor )
+ && ( depthBiasClamp == rhs.depthBiasClamp )
+ && ( depthBiasSlopeFactor == rhs.depthBiasSlopeFactor )
+ && ( lineWidth == rhs.lineWidth );
}
- bool operator!=( DisplaySurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PipelineRasterizationStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplaySurfaceCreateInfoKHR;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateCreateInfo;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags = {};
- VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode = {};
- uint32_t planeIndex = {};
- uint32_t planeStackIndex = {};
- VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
- float globalAlpha = {};
- VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode = VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque;
- VULKAN_HPP_NAMESPACE::Extent2D imageExtent = {};
+ VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags = {};
+ VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable = {};
+ VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable = {};
+ VULKAN_HPP_NAMESPACE::PolygonMode polygonMode = VULKAN_HPP_NAMESPACE::PolygonMode::eFill;
+ VULKAN_HPP_NAMESPACE::CullModeFlags cullMode = {};
+ VULKAN_HPP_NAMESPACE::FrontFace frontFace = VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise;
+ VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable = {};
+ float depthBiasConstantFactor = {};
+ float depthBiasClamp = {};
+ float depthBiasSlopeFactor = {};
+ float lineWidth = {};
+
};
- static_assert( sizeof( DisplaySurfaceCreateInfoKHR ) == sizeof( VkDisplaySurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<DisplaySurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( PipelineRasterizationStateCreateInfo ) == sizeof( VkPipelineRasterizationStateCreateInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PipelineRasterizationStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
- struct DrawIndexedIndirectCommand
+ template <>
+ struct CppType<StructureType, StructureType::ePipelineRasterizationStateCreateInfo>
{
- VULKAN_HPP_CONSTEXPR DrawIndexedIndirectCommand( uint32_t indexCount_ = {},
- uint32_t instanceCount_ = {},
- uint32_t firstIndex_ = {},
- int32_t vertexOffset_ = {},
- uint32_t firstInstance_ = {} ) VULKAN_HPP_NOEXCEPT
- : indexCount( indexCount_ )
- , instanceCount( instanceCount_ )
- , firstIndex( firstIndex_ )
- , vertexOffset( vertexOffset_ )
- , firstInstance( firstInstance_ )
+ using Type = PipelineRasterizationStateCreateInfo;
+ };
+
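As a sketch of how the rasterization-state wrapper above is meant to be used (assuming the default `vk` namespace alias; all setter names are the ones added in the hunk above, the chosen values are only illustrative):

    // conventional filled, back-face-culled rasterization with no depth bias
    vk::PipelineRasterizationStateCreateInfo rasterizationState = vk::PipelineRasterizationStateCreateInfo()
        .setDepthClampEnable( VK_FALSE )
        .setRasterizerDiscardEnable( VK_FALSE )
        .setPolygonMode( vk::PolygonMode::eFill )
        .setCullMode( vk::CullModeFlagBits::eBack )          // converts to CullModeFlags
        .setFrontFace( vk::FrontFace::eCounterClockwise )
        .setDepthBiasEnable( VK_FALSE )
        .setLineWidth( 1.0f );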
+ struct PipelineMultisampleStateCreateInfo
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineMultisampleStateCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PipelineMultisampleStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable_ = {}, float minSampleShading_ = {}, const VULKAN_HPP_NAMESPACE::SampleMask* pSampleMask_ = {}, VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), rasterizationSamples( rasterizationSamples_ ), sampleShadingEnable( sampleShadingEnable_ ), minSampleShading( minSampleShading_ ), pSampleMask( pSampleMask_ ), alphaToCoverageEnable( alphaToCoverageEnable_ ), alphaToOneEnable( alphaToOneEnable_ )
{}
- DrawIndexedIndirectCommand( VkDrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR PipelineMultisampleStateCreateInfo( PipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PipelineMultisampleStateCreateInfo( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- DrawIndexedIndirectCommand& operator=( VkDrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineMultisampleStateCreateInfo & operator=( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrawIndexedIndirectCommand const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo const *>( &rhs );
return *this;
}
- DrawIndexedIndirectCommand & setIndexCount( uint32_t indexCount_ ) VULKAN_HPP_NOEXCEPT
+ PipelineMultisampleStateCreateInfo & operator=( PipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- indexCount = indexCount_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineMultisampleStateCreateInfo ) );
return *this;
}
- DrawIndexedIndirectCommand & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT
+ PipelineMultisampleStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- instanceCount = instanceCount_;
+ pNext = pNext_;
return *this;
}
- DrawIndexedIndirectCommand & setFirstIndex( uint32_t firstIndex_ ) VULKAN_HPP_NOEXCEPT
+ PipelineMultisampleStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
- firstIndex = firstIndex_;
+ flags = flags_;
return *this;
}
- DrawIndexedIndirectCommand & setVertexOffset( int32_t vertexOffset_ ) VULKAN_HPP_NOEXCEPT
+ PipelineMultisampleStateCreateInfo & setRasterizationSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ ) VULKAN_HPP_NOEXCEPT
{
- vertexOffset = vertexOffset_;
+ rasterizationSamples = rasterizationSamples_;
return *this;
}
- DrawIndexedIndirectCommand & setFirstInstance( uint32_t firstInstance_ ) VULKAN_HPP_NOEXCEPT
+ PipelineMultisampleStateCreateInfo & setSampleShadingEnable( VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable_ ) VULKAN_HPP_NOEXCEPT
{
- firstInstance = firstInstance_;
+ sampleShadingEnable = sampleShadingEnable_;
return *this;
}
- operator VkDrawIndexedIndirectCommand const&() const VULKAN_HPP_NOEXCEPT
+ PipelineMultisampleStateCreateInfo & setMinSampleShading( float minSampleShading_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkDrawIndexedIndirectCommand*>( this );
+ minSampleShading = minSampleShading_;
+ return *this;
}
- operator VkDrawIndexedIndirectCommand &() VULKAN_HPP_NOEXCEPT
+ PipelineMultisampleStateCreateInfo & setPSampleMask( const VULKAN_HPP_NAMESPACE::SampleMask* pSampleMask_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkDrawIndexedIndirectCommand*>( this );
+ pSampleMask = pSampleMask_;
+ return *this;
}
- bool operator==( DrawIndexedIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT
+ PipelineMultisampleStateCreateInfo & setAlphaToCoverageEnable( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_ ) VULKAN_HPP_NOEXCEPT
{
- return ( indexCount == rhs.indexCount )
- && ( instanceCount == rhs.instanceCount )
- && ( firstIndex == rhs.firstIndex )
- && ( vertexOffset == rhs.vertexOffset )
- && ( firstInstance == rhs.firstInstance );
+ alphaToCoverageEnable = alphaToCoverageEnable_;
+ return *this;
}
- bool operator!=( DrawIndexedIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT
+ PipelineMultisampleStateCreateInfo & setAlphaToOneEnable( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_ ) VULKAN_HPP_NOEXCEPT
+ {
+ alphaToOneEnable = alphaToOneEnable_;
+ return *this;
+ }
+
+
+ operator VkPipelineMultisampleStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPipelineMultisampleStateCreateInfo*>( this );
+ }
+
+ operator VkPipelineMultisampleStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPipelineMultisampleStateCreateInfo*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PipelineMultisampleStateCreateInfo const& ) const = default;
+#else
+ bool operator==( PipelineMultisampleStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( rasterizationSamples == rhs.rasterizationSamples )
+ && ( sampleShadingEnable == rhs.sampleShadingEnable )
+ && ( minSampleShading == rhs.minSampleShading )
+ && ( pSampleMask == rhs.pSampleMask )
+ && ( alphaToCoverageEnable == rhs.alphaToCoverageEnable )
+ && ( alphaToOneEnable == rhs.alphaToOneEnable );
+ }
+
+ bool operator!=( PipelineMultisampleStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- uint32_t indexCount = {};
- uint32_t instanceCount = {};
- uint32_t firstIndex = {};
- int32_t vertexOffset = {};
- uint32_t firstInstance = {};
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineMultisampleStateCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags = {};
+ VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
+ VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable = {};
+ float minSampleShading = {};
+ const VULKAN_HPP_NAMESPACE::SampleMask* pSampleMask = {};
+ VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable = {};
+ VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable = {};
+
};
- static_assert( sizeof( DrawIndexedIndirectCommand ) == sizeof( VkDrawIndexedIndirectCommand ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<DrawIndexedIndirectCommand>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( PipelineMultisampleStateCreateInfo ) == sizeof( VkPipelineMultisampleStateCreateInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PipelineMultisampleStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
- struct DrawIndirectCommand
+ template <>
+ struct CppType<StructureType, StructureType::ePipelineMultisampleStateCreateInfo>
{
- VULKAN_HPP_CONSTEXPR DrawIndirectCommand( uint32_t vertexCount_ = {},
- uint32_t instanceCount_ = {},
- uint32_t firstVertex_ = {},
- uint32_t firstInstance_ = {} ) VULKAN_HPP_NOEXCEPT
- : vertexCount( vertexCount_ )
- , instanceCount( instanceCount_ )
- , firstVertex( firstVertex_ )
- , firstInstance( firstInstance_ )
+ using Type = PipelineMultisampleStateCreateInfo;
+ };
+
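The multisample wrapper above follows the same pattern; a sketch with sample shading left off (illustrative values, default `vk` alias assumed):

    // single-sampled, no per-sample shading
    vk::PipelineMultisampleStateCreateInfo multisampleState = vk::PipelineMultisampleStateCreateInfo()
        .setRasterizationSamples( vk::SampleCountFlagBits::e1 )
        .setSampleShadingEnable( VK_FALSE );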
+ struct StencilOpState
+ {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR StencilOpState(VULKAN_HPP_NAMESPACE::StencilOp failOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, VULKAN_HPP_NAMESPACE::StencilOp passOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, VULKAN_HPP_NAMESPACE::StencilOp depthFailOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever, uint32_t compareMask_ = {}, uint32_t writeMask_ = {}, uint32_t reference_ = {}) VULKAN_HPP_NOEXCEPT
+ : failOp( failOp_ ), passOp( passOp_ ), depthFailOp( depthFailOp_ ), compareOp( compareOp_ ), compareMask( compareMask_ ), writeMask( writeMask_ ), reference( reference_ )
{}
- DrawIndirectCommand( VkDrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR StencilOpState( StencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ StencilOpState( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- DrawIndirectCommand& operator=( VkDrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
+ StencilOpState & operator=( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrawIndirectCommand const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::StencilOpState const *>( &rhs );
return *this;
}
- DrawIndirectCommand & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT
+ StencilOpState & operator=( StencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT
{
- vertexCount = vertexCount_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( StencilOpState ) );
return *this;
}
- DrawIndirectCommand & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT
+ StencilOpState & setFailOp( VULKAN_HPP_NAMESPACE::StencilOp failOp_ ) VULKAN_HPP_NOEXCEPT
{
- instanceCount = instanceCount_;
+ failOp = failOp_;
return *this;
}
- DrawIndirectCommand & setFirstVertex( uint32_t firstVertex_ ) VULKAN_HPP_NOEXCEPT
+ StencilOpState & setPassOp( VULKAN_HPP_NAMESPACE::StencilOp passOp_ ) VULKAN_HPP_NOEXCEPT
{
- firstVertex = firstVertex_;
+ passOp = passOp_;
return *this;
}
- DrawIndirectCommand & setFirstInstance( uint32_t firstInstance_ ) VULKAN_HPP_NOEXCEPT
+ StencilOpState & setDepthFailOp( VULKAN_HPP_NAMESPACE::StencilOp depthFailOp_ ) VULKAN_HPP_NOEXCEPT
{
- firstInstance = firstInstance_;
+ depthFailOp = depthFailOp_;
return *this;
}
- operator VkDrawIndirectCommand const&() const VULKAN_HPP_NOEXCEPT
+ StencilOpState & setCompareOp( VULKAN_HPP_NAMESPACE::CompareOp compareOp_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkDrawIndirectCommand*>( this );
+ compareOp = compareOp_;
+ return *this;
}
- operator VkDrawIndirectCommand &() VULKAN_HPP_NOEXCEPT
+ StencilOpState & setCompareMask( uint32_t compareMask_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkDrawIndirectCommand*>( this );
+ compareMask = compareMask_;
+ return *this;
}
- bool operator==( DrawIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT
+ StencilOpState & setWriteMask( uint32_t writeMask_ ) VULKAN_HPP_NOEXCEPT
{
- return ( vertexCount == rhs.vertexCount )
- && ( instanceCount == rhs.instanceCount )
- && ( firstVertex == rhs.firstVertex )
- && ( firstInstance == rhs.firstInstance );
+ writeMask = writeMask_;
+ return *this;
}
- bool operator!=( DrawIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT
+ StencilOpState & setReference( uint32_t reference_ ) VULKAN_HPP_NOEXCEPT
+ {
+ reference = reference_;
+ return *this;
+ }
+
+
+ operator VkStencilOpState const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkStencilOpState*>( this );
+ }
+
+ operator VkStencilOpState &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkStencilOpState*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( StencilOpState const& ) const = default;
+#else
+ bool operator==( StencilOpState const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( failOp == rhs.failOp )
+ && ( passOp == rhs.passOp )
+ && ( depthFailOp == rhs.depthFailOp )
+ && ( compareOp == rhs.compareOp )
+ && ( compareMask == rhs.compareMask )
+ && ( writeMask == rhs.writeMask )
+ && ( reference == rhs.reference );
+ }
+
+ bool operator!=( StencilOpState const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- uint32_t vertexCount = {};
- uint32_t instanceCount = {};
- uint32_t firstVertex = {};
- uint32_t firstInstance = {};
+ VULKAN_HPP_NAMESPACE::StencilOp failOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep;
+ VULKAN_HPP_NAMESPACE::StencilOp passOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep;
+ VULKAN_HPP_NAMESPACE::StencilOp depthFailOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep;
+ VULKAN_HPP_NAMESPACE::CompareOp compareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever;
+ uint32_t compareMask = {};
+ uint32_t writeMask = {};
+ uint32_t reference = {};
+
};
- static_assert( sizeof( DrawIndirectCommand ) == sizeof( VkDrawIndirectCommand ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<DrawIndirectCommand>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( StencilOpState ) == sizeof( VkStencilOpState ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<StencilOpState>::value, "struct wrapper is not a standard layout!" );
- struct DrawMeshTasksIndirectCommandNV
+ struct PipelineDepthStencilStateCreateInfo
{
- VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandNV( uint32_t taskCount_ = {},
- uint32_t firstTask_ = {} ) VULKAN_HPP_NOEXCEPT
- : taskCount( taskCount_ )
- , firstTask( firstTask_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineDepthStencilStateCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable_ = {}, VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever, VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable_ = {}, VULKAN_HPP_NAMESPACE::StencilOpState front_ = {}, VULKAN_HPP_NAMESPACE::StencilOpState back_ = {}, float minDepthBounds_ = {}, float maxDepthBounds_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), depthTestEnable( depthTestEnable_ ), depthWriteEnable( depthWriteEnable_ ), depthCompareOp( depthCompareOp_ ), depthBoundsTestEnable( depthBoundsTestEnable_ ), stencilTestEnable( stencilTestEnable_ ), front( front_ ), back( back_ ), minDepthBounds( minDepthBounds_ ), maxDepthBounds( maxDepthBounds_ )
{}
- DrawMeshTasksIndirectCommandNV( VkDrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo( PipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PipelineDepthStencilStateCreateInfo( VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- DrawMeshTasksIndirectCommandNV& operator=( VkDrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineDepthStencilStateCreateInfo & operator=( VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandNV const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo const *>( &rhs );
return *this;
}
- DrawMeshTasksIndirectCommandNV & setTaskCount( uint32_t taskCount_ ) VULKAN_HPP_NOEXCEPT
+ PipelineDepthStencilStateCreateInfo & operator=( PipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- taskCount = taskCount_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineDepthStencilStateCreateInfo ) );
return *this;
}
- DrawMeshTasksIndirectCommandNV & setFirstTask( uint32_t firstTask_ ) VULKAN_HPP_NOEXCEPT
+ PipelineDepthStencilStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- firstTask = firstTask_;
+ pNext = pNext_;
return *this;
}
- operator VkDrawMeshTasksIndirectCommandNV const&() const VULKAN_HPP_NOEXCEPT
+ PipelineDepthStencilStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkDrawMeshTasksIndirectCommandNV*>( this );
+ flags = flags_;
+ return *this;
}
- operator VkDrawMeshTasksIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
+ PipelineDepthStencilStateCreateInfo & setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkDrawMeshTasksIndirectCommandNV*>( this );
+ depthTestEnable = depthTestEnable_;
+ return *this;
}
- bool operator==( DrawMeshTasksIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ PipelineDepthStencilStateCreateInfo & setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable_ ) VULKAN_HPP_NOEXCEPT
{
- return ( taskCount == rhs.taskCount )
- && ( firstTask == rhs.firstTask );
+ depthWriteEnable = depthWriteEnable_;
+ return *this;
}
- bool operator!=( DrawMeshTasksIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ PipelineDepthStencilStateCreateInfo & setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp_ ) VULKAN_HPP_NOEXCEPT
+ {
+ depthCompareOp = depthCompareOp_;
+ return *this;
+ }
+
+ PipelineDepthStencilStateCreateInfo & setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable_ ) VULKAN_HPP_NOEXCEPT
+ {
+ depthBoundsTestEnable = depthBoundsTestEnable_;
+ return *this;
+ }
+
+ PipelineDepthStencilStateCreateInfo & setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable_ ) VULKAN_HPP_NOEXCEPT
+ {
+ stencilTestEnable = stencilTestEnable_;
+ return *this;
+ }
+
+ PipelineDepthStencilStateCreateInfo & setFront( VULKAN_HPP_NAMESPACE::StencilOpState const & front_ ) VULKAN_HPP_NOEXCEPT
+ {
+ front = front_;
+ return *this;
+ }
+
+ PipelineDepthStencilStateCreateInfo & setBack( VULKAN_HPP_NAMESPACE::StencilOpState const & back_ ) VULKAN_HPP_NOEXCEPT
+ {
+ back = back_;
+ return *this;
+ }
+
+ PipelineDepthStencilStateCreateInfo & setMinDepthBounds( float minDepthBounds_ ) VULKAN_HPP_NOEXCEPT
+ {
+ minDepthBounds = minDepthBounds_;
+ return *this;
+ }
+
+ PipelineDepthStencilStateCreateInfo & setMaxDepthBounds( float maxDepthBounds_ ) VULKAN_HPP_NOEXCEPT
+ {
+ maxDepthBounds = maxDepthBounds_;
+ return *this;
+ }
+
+
+ operator VkPipelineDepthStencilStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPipelineDepthStencilStateCreateInfo*>( this );
+ }
+
+ operator VkPipelineDepthStencilStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPipelineDepthStencilStateCreateInfo*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PipelineDepthStencilStateCreateInfo const& ) const = default;
+#else
+ bool operator==( PipelineDepthStencilStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( depthTestEnable == rhs.depthTestEnable )
+ && ( depthWriteEnable == rhs.depthWriteEnable )
+ && ( depthCompareOp == rhs.depthCompareOp )
+ && ( depthBoundsTestEnable == rhs.depthBoundsTestEnable )
+ && ( stencilTestEnable == rhs.stencilTestEnable )
+ && ( front == rhs.front )
+ && ( back == rhs.back )
+ && ( minDepthBounds == rhs.minDepthBounds )
+ && ( maxDepthBounds == rhs.maxDepthBounds );
+ }
+
+ bool operator!=( PipelineDepthStencilStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- uint32_t taskCount = {};
- uint32_t firstTask = {};
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDepthStencilStateCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags = {};
+ VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable = {};
+ VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable = {};
+ VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever;
+ VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable = {};
+ VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable = {};
+ VULKAN_HPP_NAMESPACE::StencilOpState front = {};
+ VULKAN_HPP_NAMESPACE::StencilOpState back = {};
+ float minDepthBounds = {};
+ float maxDepthBounds = {};
+
};
- static_assert( sizeof( DrawMeshTasksIndirectCommandNV ) == sizeof( VkDrawMeshTasksIndirectCommandNV ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<DrawMeshTasksIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( PipelineDepthStencilStateCreateInfo ) == sizeof( VkPipelineDepthStencilStateCreateInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PipelineDepthStencilStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
- struct DrmFormatModifierPropertiesEXT
+ template <>
+ struct CppType<StructureType, StructureType::ePipelineDepthStencilStateCreateInfo>
{
- DrmFormatModifierPropertiesEXT( uint64_t drmFormatModifier_ = {},
- uint32_t drmFormatModifierPlaneCount_ = {},
- VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures_ = {} ) VULKAN_HPP_NOEXCEPT
- : drmFormatModifier( drmFormatModifier_ )
- , drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ )
- , drmFormatModifierTilingFeatures( drmFormatModifierTilingFeatures_ )
+ using Type = PipelineDepthStencilStateCreateInfo;
+ };
+
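StencilOpState and PipelineDepthStencilStateCreateInfo above compose naturally, since the depth-stencil wrapper takes StencilOpState by value for front and back. A sketch under the same assumptions (default `vk` alias, illustrative values only):

    // keep-everything stencil op, depth test and write enabled, stencil test disabled
    vk::StencilOpState stencilOp = vk::StencilOpState()
        .setFailOp( vk::StencilOp::eKeep )
        .setPassOp( vk::StencilOp::eKeep )
        .setDepthFailOp( vk::StencilOp::eKeep )
        .setCompareOp( vk::CompareOp::eAlways );

    vk::PipelineDepthStencilStateCreateInfo depthStencilState = vk::PipelineDepthStencilStateCreateInfo()
        .setDepthTestEnable( VK_TRUE )
        .setDepthWriteEnable( VK_TRUE )
        .setDepthCompareOp( vk::CompareOp::eLessOrEqual )
        .setStencilTestEnable( VK_FALSE )
        .setFront( stencilOp )
        .setBack( stencilOp );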
+ struct PipelineColorBlendAttachmentState
+ {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PipelineColorBlendAttachmentState(VULKAN_HPP_NAMESPACE::Bool32 blendEnable_ = {}, VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd, VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd, VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_ = {}) VULKAN_HPP_NOEXCEPT
+ : blendEnable( blendEnable_ ), srcColorBlendFactor( srcColorBlendFactor_ ), dstColorBlendFactor( dstColorBlendFactor_ ), colorBlendOp( colorBlendOp_ ), srcAlphaBlendFactor( srcAlphaBlendFactor_ ), dstAlphaBlendFactor( dstAlphaBlendFactor_ ), alphaBlendOp( alphaBlendOp_ ), colorWriteMask( colorWriteMask_ )
{}
- DrmFormatModifierPropertiesEXT( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR PipelineColorBlendAttachmentState( PipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PipelineColorBlendAttachmentState( VkPipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- DrmFormatModifierPropertiesEXT& operator=( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineColorBlendAttachmentState & operator=( VkPipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState const *>( &rhs );
return *this;
}
- operator VkDrmFormatModifierPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+ PipelineColorBlendAttachmentState & operator=( PipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkDrmFormatModifierPropertiesEXT*>( this );
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineColorBlendAttachmentState ) );
+ return *this;
}
- operator VkDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+ PipelineColorBlendAttachmentState & setBlendEnable( VULKAN_HPP_NAMESPACE::Bool32 blendEnable_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkDrmFormatModifierPropertiesEXT*>( this );
+ blendEnable = blendEnable_;
+ return *this;
}
- bool operator==( DrmFormatModifierPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ PipelineColorBlendAttachmentState & setSrcColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT
{
- return ( drmFormatModifier == rhs.drmFormatModifier )
- && ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount )
- && ( drmFormatModifierTilingFeatures == rhs.drmFormatModifierTilingFeatures );
+ srcColorBlendFactor = srcColorBlendFactor_;
+ return *this;
}
- bool operator!=( DrmFormatModifierPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ PipelineColorBlendAttachmentState & setDstColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstColorBlendFactor = dstColorBlendFactor_;
+ return *this;
+ }
+
+ PipelineColorBlendAttachmentState & setColorBlendOp( VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ ) VULKAN_HPP_NOEXCEPT
+ {
+ colorBlendOp = colorBlendOp_;
+ return *this;
+ }
+
+ PipelineColorBlendAttachmentState & setSrcAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcAlphaBlendFactor = srcAlphaBlendFactor_;
+ return *this;
+ }
+
+ PipelineColorBlendAttachmentState & setDstAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstAlphaBlendFactor = dstAlphaBlendFactor_;
+ return *this;
+ }
+
+ PipelineColorBlendAttachmentState & setAlphaBlendOp( VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ ) VULKAN_HPP_NOEXCEPT
+ {
+ alphaBlendOp = alphaBlendOp_;
+ return *this;
+ }
+
+ PipelineColorBlendAttachmentState & setColorWriteMask( VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_ ) VULKAN_HPP_NOEXCEPT
+ {
+ colorWriteMask = colorWriteMask_;
+ return *this;
+ }
+
+
+ operator VkPipelineColorBlendAttachmentState const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPipelineColorBlendAttachmentState*>( this );
+ }
+
+ operator VkPipelineColorBlendAttachmentState &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPipelineColorBlendAttachmentState*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PipelineColorBlendAttachmentState const& ) const = default;
+#else
+ bool operator==( PipelineColorBlendAttachmentState const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( blendEnable == rhs.blendEnable )
+ && ( srcColorBlendFactor == rhs.srcColorBlendFactor )
+ && ( dstColorBlendFactor == rhs.dstColorBlendFactor )
+ && ( colorBlendOp == rhs.colorBlendOp )
+ && ( srcAlphaBlendFactor == rhs.srcAlphaBlendFactor )
+ && ( dstAlphaBlendFactor == rhs.dstAlphaBlendFactor )
+ && ( alphaBlendOp == rhs.alphaBlendOp )
+ && ( colorWriteMask == rhs.colorWriteMask );
+ }
+
+ bool operator!=( PipelineColorBlendAttachmentState const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- uint64_t drmFormatModifier = {};
- uint32_t drmFormatModifierPlaneCount = {};
- VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures = {};
+ VULKAN_HPP_NAMESPACE::Bool32 blendEnable = {};
+ VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
+ VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
+ VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd;
+ VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
+ VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
+ VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd;
+ VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask = {};
+
};
- static_assert( sizeof( DrmFormatModifierPropertiesEXT ) == sizeof( VkDrmFormatModifierPropertiesEXT ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<DrmFormatModifierPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( PipelineColorBlendAttachmentState ) == sizeof( VkPipelineColorBlendAttachmentState ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PipelineColorBlendAttachmentState>::value, "struct wrapper is not a standard layout!" );
- struct DrmFormatModifierPropertiesListEXT
+ struct PipelineColorBlendStateCreateInfo
{
- DrmFormatModifierPropertiesListEXT( uint32_t drmFormatModifierCount_ = {},
- VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT* pDrmFormatModifierProperties_ = {} ) VULKAN_HPP_NOEXCEPT
- : drmFormatModifierCount( drmFormatModifierCount_ )
- , pDrmFormatModifierProperties( pDrmFormatModifierProperties_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineColorBlendStateCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_ = {}, VULKAN_HPP_NAMESPACE::LogicOp logicOp_ = VULKAN_HPP_NAMESPACE::LogicOp::eClear, uint32_t attachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState* pAttachments_ = {}, std::array<float,4> const& blendConstants_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), logicOpEnable( logicOpEnable_ ), logicOp( logicOp_ ), attachmentCount( attachmentCount_ ), pAttachments( pAttachments_ ), blendConstants( blendConstants_ )
{}
- VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT & operator=( VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo( PipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PipelineColorBlendStateCreateInfo( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ PipelineColorBlendStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_, VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_, VULKAN_HPP_NAMESPACE::LogicOp logicOp_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState> const & attachments_, std::array<float,4> const& blendConstants_ = {} )
+ : flags( flags_ ), logicOpEnable( logicOpEnable_ ), logicOp( logicOp_ ), attachmentCount( static_cast<uint32_t>( attachments_.size() ) ), pAttachments( attachments_.data() ), blendConstants( blendConstants_ )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PipelineColorBlendStateCreateInfo & operator=( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT ) - offsetof( DrmFormatModifierPropertiesListEXT, pNext ) );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo const *>( &rhs );
return *this;
}
- DrmFormatModifierPropertiesListEXT( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineColorBlendStateCreateInfo & operator=( PipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineColorBlendStateCreateInfo ) );
+ return *this;
}
- DrmFormatModifierPropertiesListEXT& operator=( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineColorBlendStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT const *>(&rhs);
+ pNext = pNext_;
return *this;
}
- operator VkDrmFormatModifierPropertiesListEXT const&() const VULKAN_HPP_NOEXCEPT
+ PipelineColorBlendStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkDrmFormatModifierPropertiesListEXT*>( this );
+ flags = flags_;
+ return *this;
}
- operator VkDrmFormatModifierPropertiesListEXT &() VULKAN_HPP_NOEXCEPT
+ PipelineColorBlendStateCreateInfo & setLogicOpEnable( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkDrmFormatModifierPropertiesListEXT*>( this );
+ logicOpEnable = logicOpEnable_;
+ return *this;
}
- bool operator==( DrmFormatModifierPropertiesListEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ PipelineColorBlendStateCreateInfo & setLogicOp( VULKAN_HPP_NAMESPACE::LogicOp logicOp_ ) VULKAN_HPP_NOEXCEPT
+ {
+ logicOp = logicOp_;
+ return *this;
+ }
+
+ PipelineColorBlendStateCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ attachmentCount = attachmentCount_;
+ return *this;
+ }
+
+ PipelineColorBlendStateCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState* pAttachments_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pAttachments = pAttachments_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ PipelineColorBlendStateCreateInfo & setAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState> const & attachments_ ) VULKAN_HPP_NOEXCEPT
+ {
+ attachmentCount = static_cast<uint32_t>( attachments_.size() );
+ pAttachments = attachments_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+ PipelineColorBlendStateCreateInfo & setBlendConstants( std::array<float,4> blendConstants_ ) VULKAN_HPP_NOEXCEPT
+ {
+ blendConstants = blendConstants_;
+ return *this;
+ }
+
+
+ operator VkPipelineColorBlendStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPipelineColorBlendStateCreateInfo*>( this );
+ }
+
+ operator VkPipelineColorBlendStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPipelineColorBlendStateCreateInfo*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PipelineColorBlendStateCreateInfo const& ) const = default;
+#else
+ bool operator==( PipelineColorBlendStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( drmFormatModifierCount == rhs.drmFormatModifierCount )
- && ( pDrmFormatModifierProperties == rhs.pDrmFormatModifierProperties );
+ && ( flags == rhs.flags )
+ && ( logicOpEnable == rhs.logicOpEnable )
+ && ( logicOp == rhs.logicOp )
+ && ( attachmentCount == rhs.attachmentCount )
+ && ( pAttachments == rhs.pAttachments )
+ && ( blendConstants == rhs.blendConstants );
}
- bool operator!=( DrmFormatModifierPropertiesListEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PipelineColorBlendStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDrmFormatModifierPropertiesListEXT;
- void* pNext = {};
- uint32_t drmFormatModifierCount = {};
- VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT* pDrmFormatModifierProperties = {};
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorBlendStateCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags = {};
+ VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable = {};
+ VULKAN_HPP_NAMESPACE::LogicOp logicOp = VULKAN_HPP_NAMESPACE::LogicOp::eClear;
+ uint32_t attachmentCount = {};
+ const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState* pAttachments = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> blendConstants = {};
+
};
- static_assert( sizeof( DrmFormatModifierPropertiesListEXT ) == sizeof( VkDrmFormatModifierPropertiesListEXT ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<DrmFormatModifierPropertiesListEXT>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( PipelineColorBlendStateCreateInfo ) == sizeof( VkPipelineColorBlendStateCreateInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PipelineColorBlendStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
- struct EventCreateInfo
+ template <>
+ struct CppType<StructureType, StructureType::ePipelineColorBlendStateCreateInfo>
{
- VULKAN_HPP_CONSTEXPR EventCreateInfo( VULKAN_HPP_NAMESPACE::EventCreateFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ using Type = PipelineColorBlendStateCreateInfo;
+ };
+
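The color-blend state above references an array of PipelineColorBlendAttachmentState by count and pointer; a sketch with one non-blended attachment that writes all channels (default `vk` alias assumed, values illustrative):

    // blending disabled, full RGBA write mask
    vk::PipelineColorBlendAttachmentState blendAttachment = vk::PipelineColorBlendAttachmentState()
        .setBlendEnable( VK_FALSE )
        .setColorWriteMask( vk::ColorComponentFlagBits::eR | vk::ColorComponentFlagBits::eG
                          | vk::ColorComponentFlagBits::eB | vk::ColorComponentFlagBits::eA );

    vk::PipelineColorBlendStateCreateInfo colorBlendState = vk::PipelineColorBlendStateCreateInfo()
        .setLogicOpEnable( VK_FALSE )
        .setAttachmentCount( 1 )
        .setPAttachments( &blendAttachment );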
+ struct PipelineDynamicStateCreateInfo
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineDynamicStateCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PipelineDynamicStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_ = {}, uint32_t dynamicStateCount_ = {}, const VULKAN_HPP_NAMESPACE::DynamicState* pDynamicStates_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), dynamicStateCount( dynamicStateCount_ ), pDynamicStates( pDynamicStates_ )
{}
- VULKAN_HPP_NAMESPACE::EventCreateInfo & operator=( VULKAN_HPP_NAMESPACE::EventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR PipelineDynamicStateCreateInfo( PipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PipelineDynamicStateCreateInfo( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::EventCreateInfo ) - offsetof( EventCreateInfo, pNext ) );
- return *this;
+ *this = rhs;
}
- EventCreateInfo( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ PipelineDynamicStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DynamicState> const & dynamicStates_ )
+ : flags( flags_ ), dynamicStateCount( static_cast<uint32_t>( dynamicStates_.size() ) ), pDynamicStates( dynamicStates_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PipelineDynamicStateCreateInfo & operator=( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo const *>( &rhs );
+ return *this;
}
- EventCreateInfo& operator=( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineDynamicStateCreateInfo & operator=( PipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::EventCreateInfo const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineDynamicStateCreateInfo ) );
return *this;
}
- EventCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ PipelineDynamicStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- EventCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::EventCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ PipelineDynamicStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- operator VkEventCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+ PipelineDynamicStateCreateInfo & setDynamicStateCount( uint32_t dynamicStateCount_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkEventCreateInfo*>( this );
+ dynamicStateCount = dynamicStateCount_;
+ return *this;
}
- operator VkEventCreateInfo &() VULKAN_HPP_NOEXCEPT
+ PipelineDynamicStateCreateInfo & setPDynamicStates( const VULKAN_HPP_NAMESPACE::DynamicState* pDynamicStates_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkEventCreateInfo*>( this );
+ pDynamicStates = pDynamicStates_;
+ return *this;
}
- bool operator==( EventCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ PipelineDynamicStateCreateInfo & setDynamicStates( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DynamicState> const & dynamicStates_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dynamicStateCount = static_cast<uint32_t>( dynamicStates_.size() );
+ pDynamicStates = dynamicStates_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+ operator VkPipelineDynamicStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPipelineDynamicStateCreateInfo*>( this );
+ }
+
+ operator VkPipelineDynamicStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPipelineDynamicStateCreateInfo*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PipelineDynamicStateCreateInfo const& ) const = default;
+#else
+ bool operator==( PipelineDynamicStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( flags == rhs.flags );
+ && ( flags == rhs.flags )
+ && ( dynamicStateCount == rhs.dynamicStateCount )
+ && ( pDynamicStates == rhs.pDynamicStates );
}
- bool operator!=( EventCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PipelineDynamicStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eEventCreateInfo;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDynamicStateCreateInfo;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::EventCreateFlags flags = {};
+ VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags = {};
+ uint32_t dynamicStateCount = {};
+ const VULKAN_HPP_NAMESPACE::DynamicState* pDynamicStates = {};
+
};
- static_assert( sizeof( EventCreateInfo ) == sizeof( VkEventCreateInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<EventCreateInfo>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( PipelineDynamicStateCreateInfo ) == sizeof( VkPipelineDynamicStateCreateInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PipelineDynamicStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
- struct ExportFenceCreateInfo
+ template <>
+ struct CppType<StructureType, StructureType::ePipelineDynamicStateCreateInfo>
{
- VULKAN_HPP_CONSTEXPR ExportFenceCreateInfo( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
- : handleTypes( handleTypes_ )
+ using Type = PipelineDynamicStateCreateInfo;
+ };
+
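A sketch of the dynamic-state wrapper above, and of how these ...StateCreateInfo objects are consumed: GraphicsPipelineCreateInfo, defined next, takes each of them by pointer through the corresponding setP*State members (default `vk` alias and <array> assumed; values illustrative):

    // viewport and scissor supplied at record time instead of bake time
    std::array<vk::DynamicState, 2> dynamicStates = { vk::DynamicState::eViewport,
                                                      vk::DynamicState::eScissor };
    vk::PipelineDynamicStateCreateInfo dynamicState = vk::PipelineDynamicStateCreateInfo()
        .setDynamicStateCount( static_cast<uint32_t>( dynamicStates.size() ) )
        .setPDynamicStates( dynamicStates.data() );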
+ struct GraphicsPipelineCreateInfo
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsPipelineCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo(VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, uint32_t stageCount_ = {}, const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ = {}, const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo* pInputAssemblyState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo* pViewportState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo* pRasterizationState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo* pMultisampleState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo* pDepthStencilState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo* pColorBlendState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo* pDynamicState_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, uint32_t subpass_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), stageCount( stageCount_ ), pStages( pStages_ ), pVertexInputState( pVertexInputState_ ), pInputAssemblyState( pInputAssemblyState_ ), pTessellationState( pTessellationState_ ), pViewportState( pViewportState_ ), pRasterizationState( pRasterizationState_ ), pMultisampleState( pMultisampleState_ ), pDepthStencilState( pDepthStencilState_ ), pColorBlendState( pColorBlendState_ ), pDynamicState( pDynamicState_ ), layout( layout_ ), renderPass( renderPass_ ), subpass( subpass_ ), basePipelineHandle( basePipelineHandle_ ), basePipelineIndex( basePipelineIndex_ )
{}
- VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo & operator=( VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo( GraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ GraphicsPipelineCreateInfo( VkGraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo ) - offsetof( ExportFenceCreateInfo, pNext ) );
- return *this;
+ *this = rhs;
}
- ExportFenceCreateInfo( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ GraphicsPipelineCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_, const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo* pInputAssemblyState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo* pViewportState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo* pRasterizationState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo* pMultisampleState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo* pDepthStencilState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo* pColorBlendState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo* pDynamicState_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, uint32_t subpass_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {} )
+ : flags( flags_ ), stageCount( static_cast<uint32_t>( stages_.size() ) ), pStages( stages_.data() ), pVertexInputState( pVertexInputState_ ), pInputAssemblyState( pInputAssemblyState_ ), pTessellationState( pTessellationState_ ), pViewportState( pViewportState_ ), pRasterizationState( pRasterizationState_ ), pMultisampleState( pMultisampleState_ ), pDepthStencilState( pDepthStencilState_ ), pColorBlendState( pColorBlendState_ ), pDynamicState( pDynamicState_ ), layout( layout_ ), renderPass( renderPass_ ), subpass( subpass_ ), basePipelineHandle( basePipelineHandle_ ), basePipelineIndex( basePipelineIndex_ )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ GraphicsPipelineCreateInfo & operator=( VkGraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const *>( &rhs );
+ return *this;
}
- ExportFenceCreateInfo& operator=( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ GraphicsPipelineCreateInfo & operator=( GraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( GraphicsPipelineCreateInfo ) );
return *this;
}
- ExportFenceCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ GraphicsPipelineCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- ExportFenceCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
+ GraphicsPipelineCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
- handleTypes = handleTypes_;
+ flags = flags_;
return *this;
}
- operator VkExportFenceCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+ GraphicsPipelineCreateInfo & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkExportFenceCreateInfo*>( this );
+ stageCount = stageCount_;
+ return *this;
}
- operator VkExportFenceCreateInfo &() VULKAN_HPP_NOEXCEPT
+ GraphicsPipelineCreateInfo & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkExportFenceCreateInfo*>( this );
+ pStages = pStages_;
+ return *this;
}
- bool operator==( ExportFenceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ GraphicsPipelineCreateInfo & setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_ ) VULKAN_HPP_NOEXCEPT
+ {
+ stageCount = static_cast<uint32_t>( stages_.size() );
+ pStages = stages_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+ GraphicsPipelineCreateInfo & setPVertexInputState( const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pVertexInputState = pVertexInputState_;
+ return *this;
+ }
+
+ GraphicsPipelineCreateInfo & setPInputAssemblyState( const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo* pInputAssemblyState_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pInputAssemblyState = pInputAssemblyState_;
+ return *this;
+ }
+
+ GraphicsPipelineCreateInfo & setPTessellationState( const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pTessellationState = pTessellationState_;
+ return *this;
+ }
+
+ GraphicsPipelineCreateInfo & setPViewportState( const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo* pViewportState_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pViewportState = pViewportState_;
+ return *this;
+ }
+
+ GraphicsPipelineCreateInfo & setPRasterizationState( const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo* pRasterizationState_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pRasterizationState = pRasterizationState_;
+ return *this;
+ }
+
+ GraphicsPipelineCreateInfo & setPMultisampleState( const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo* pMultisampleState_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pMultisampleState = pMultisampleState_;
+ return *this;
+ }
+
+ GraphicsPipelineCreateInfo & setPDepthStencilState( const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo* pDepthStencilState_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pDepthStencilState = pDepthStencilState_;
+ return *this;
+ }
+
+ GraphicsPipelineCreateInfo & setPColorBlendState( const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo* pColorBlendState_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pColorBlendState = pColorBlendState_;
+ return *this;
+ }
+
+ GraphicsPipelineCreateInfo & setPDynamicState( const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo* pDynamicState_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pDynamicState = pDynamicState_;
+ return *this;
+ }
+
+ GraphicsPipelineCreateInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
+ {
+ layout = layout_;
+ return *this;
+ }
+
+ GraphicsPipelineCreateInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
+ {
+ renderPass = renderPass_;
+ return *this;
+ }
+
+ GraphicsPipelineCreateInfo & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT
+ {
+ subpass = subpass_;
+ return *this;
+ }
+
+ GraphicsPipelineCreateInfo & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
+ {
+ basePipelineHandle = basePipelineHandle_;
+ return *this;
+ }
+
+ GraphicsPipelineCreateInfo & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
+ {
+ basePipelineIndex = basePipelineIndex_;
+ return *this;
+ }
+
+
+ operator VkGraphicsPipelineCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( this );
+ }
+
+ operator VkGraphicsPipelineCreateInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkGraphicsPipelineCreateInfo*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( GraphicsPipelineCreateInfo const& ) const = default;
+#else
+ bool operator==( GraphicsPipelineCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( handleTypes == rhs.handleTypes );
+ && ( flags == rhs.flags )
+ && ( stageCount == rhs.stageCount )
+ && ( pStages == rhs.pStages )
+ && ( pVertexInputState == rhs.pVertexInputState )
+ && ( pInputAssemblyState == rhs.pInputAssemblyState )
+ && ( pTessellationState == rhs.pTessellationState )
+ && ( pViewportState == rhs.pViewportState )
+ && ( pRasterizationState == rhs.pRasterizationState )
+ && ( pMultisampleState == rhs.pMultisampleState )
+ && ( pDepthStencilState == rhs.pDepthStencilState )
+ && ( pColorBlendState == rhs.pColorBlendState )
+ && ( pDynamicState == rhs.pDynamicState )
+ && ( layout == rhs.layout )
+ && ( renderPass == rhs.renderPass )
+ && ( subpass == rhs.subpass )
+ && ( basePipelineHandle == rhs.basePipelineHandle )
+ && ( basePipelineIndex == rhs.basePipelineIndex );
}
- bool operator!=( ExportFenceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( GraphicsPipelineCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportFenceCreateInfo;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineCreateInfo;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes = {};
+ VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {};
+ uint32_t stageCount = {};
+ const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages = {};
+ const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState = {};
+ const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo* pInputAssemblyState = {};
+ const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState = {};
+ const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo* pViewportState = {};
+ const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo* pRasterizationState = {};
+ const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo* pMultisampleState = {};
+ const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo* pDepthStencilState = {};
+ const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo* pColorBlendState = {};
+ const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo* pDynamicState = {};
+ VULKAN_HPP_NAMESPACE::PipelineLayout layout = {};
+ VULKAN_HPP_NAMESPACE::RenderPass renderPass = {};
+ uint32_t subpass = {};
+ VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {};
+ int32_t basePipelineIndex = {};
+
};
- static_assert( sizeof( ExportFenceCreateInfo ) == sizeof( VkExportFenceCreateInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ExportFenceCreateInfo>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( GraphicsPipelineCreateInfo ) == sizeof( VkGraphicsPipelineCreateInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<GraphicsPipelineCreateInfo>::value, "struct wrapper is not a standard layout!" );
-#ifdef VK_USE_PLATFORM_WIN32_KHR
+ template <>
+ struct CppType<StructureType, StructureType::eGraphicsPipelineCreateInfo>
+ {
+ using Type = GraphicsPipelineCreateInfo;
+ };
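
For reference, a minimal usage sketch of the ArrayProxyNoTemporaries overloads added to GraphicsPipelineCreateInfo above: setStages() fills stageCount and pStages from a single container, and the setters chain. This sketch is illustrative only, not part of the generated header; vk is the default VULKAN_HPP_NAMESPACE, and the shader stages, fixed-function state structs, pipelineLayout and renderPass are assumed to exist in the application.

    std::array<vk::PipelineShaderStageCreateInfo, 2> stages = { vertStage, fragStage };
    vk::GraphicsPipelineCreateInfo pipelineInfo;
    pipelineInfo.setStages( stages )                      // sets stageCount and pStages together
                .setPVertexInputState( &vertexInput )
                .setPInputAssemblyState( &inputAssembly )
                .setPViewportState( &viewportState )
                .setPRasterizationState( &rasterization )
                .setPMultisampleState( &multisample )
                .setPColorBlendState( &colorBlend )
                .setLayout( pipelineLayout )
                .setRenderPass( renderPass )
                .setSubpass( 0 );
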
- struct ExportFenceWin32HandleInfoKHR
+ struct ImageCreateInfo
{
- VULKAN_HPP_CONSTEXPR ExportFenceWin32HandleInfoKHR( const SECURITY_ATTRIBUTES* pAttributes_ = {},
- DWORD dwAccess_ = {},
- LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT
- : pAttributes( pAttributes_ )
- , dwAccess( dwAccess_ )
- , name( name_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ImageCreateInfo(VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ImageType imageType_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}, uint32_t mipLevels_ = {}, uint32_t arrayLayers_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = {}, const uint32_t* pQueueFamilyIndices_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), imageType( imageType_ ), format( format_ ), extent( extent_ ), mipLevels( mipLevels_ ), arrayLayers( arrayLayers_ ), samples( samples_ ), tiling( tiling_ ), usage( usage_ ), sharingMode( sharingMode_ ), queueFamilyIndexCount( queueFamilyIndexCount_ ), pQueueFamilyIndices( pQueueFamilyIndices_ ), initialLayout( initialLayout_ )
{}
- VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR & operator=( VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ImageCreateInfo( ImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImageCreateInfo( VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR ) - offsetof( ExportFenceWin32HandleInfoKHR, pNext ) );
- return *this;
+ *this = rhs;
}
- ExportFenceWin32HandleInfoKHR( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ ImageCreateInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_, VULKAN_HPP_NAMESPACE::ImageType imageType_, VULKAN_HPP_NAMESPACE::Format format_, VULKAN_HPP_NAMESPACE::Extent3D extent_, uint32_t mipLevels_, uint32_t arrayLayers_, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_, VULKAN_HPP_NAMESPACE::ImageTiling tiling_, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_, VULKAN_HPP_NAMESPACE::SharingMode sharingMode_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_, VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined )
+ : flags( flags_ ), imageType( imageType_ ), format( format_ ), extent( extent_ ), mipLevels( mipLevels_ ), arrayLayers( arrayLayers_ ), samples( samples_ ), tiling( tiling_ ), usage( usage_ ), sharingMode( sharingMode_ ), queueFamilyIndexCount( static_cast<uint32_t>( queueFamilyIndices_.size() ) ), pQueueFamilyIndices( queueFamilyIndices_.data() ), initialLayout( initialLayout_ )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ ImageCreateInfo & operator=( VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageCreateInfo const *>( &rhs );
+ return *this;
}
- ExportFenceWin32HandleInfoKHR& operator=( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ ImageCreateInfo & operator=( ImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageCreateInfo ) );
return *this;
}
- ExportFenceWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ ImageCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- ExportFenceWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) VULKAN_HPP_NOEXCEPT
+ ImageCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
- pAttributes = pAttributes_;
+ flags = flags_;
return *this;
}
- ExportFenceWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT
+ ImageCreateInfo & setImageType( VULKAN_HPP_NAMESPACE::ImageType imageType_ ) VULKAN_HPP_NOEXCEPT
{
- dwAccess = dwAccess_;
+ imageType = imageType_;
return *this;
}
- ExportFenceWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
+ ImageCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
{
- name = name_;
+ format = format_;
return *this;
}
- operator VkExportFenceWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+ ImageCreateInfo & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkExportFenceWin32HandleInfoKHR*>( this );
+ extent = extent_;
+ return *this;
}
- operator VkExportFenceWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+ ImageCreateInfo & setMipLevels( uint32_t mipLevels_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkExportFenceWin32HandleInfoKHR*>( this );
+ mipLevels = mipLevels_;
+ return *this;
}
- bool operator==( ExportFenceWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ ImageCreateInfo & setArrayLayers( uint32_t arrayLayers_ ) VULKAN_HPP_NOEXCEPT
+ {
+ arrayLayers = arrayLayers_;
+ return *this;
+ }
+
+ ImageCreateInfo & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
+ {
+ samples = samples_;
+ return *this;
+ }
+
+ ImageCreateInfo & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT
+ {
+ tiling = tiling_;
+ return *this;
+ }
+
+ ImageCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
+ {
+ usage = usage_;
+ return *this;
+ }
+
+ ImageCreateInfo & setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT
+ {
+ sharingMode = sharingMode_;
+ return *this;
+ }
+
+ ImageCreateInfo & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ queueFamilyIndexCount = queueFamilyIndexCount_;
+ return *this;
+ }
+
+ ImageCreateInfo & setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pQueueFamilyIndices = pQueueFamilyIndices_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ ImageCreateInfo & setQueueFamilyIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
+ {
+ queueFamilyIndexCount = static_cast<uint32_t>( queueFamilyIndices_.size() );
+ pQueueFamilyIndices = queueFamilyIndices_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+ ImageCreateInfo & setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT
+ {
+ initialLayout = initialLayout_;
+ return *this;
+ }
+
+
+ operator VkImageCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkImageCreateInfo*>( this );
+ }
+
+ operator VkImageCreateInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkImageCreateInfo*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ImageCreateInfo const& ) const = default;
+#else
+ bool operator==( ImageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( pAttributes == rhs.pAttributes )
- && ( dwAccess == rhs.dwAccess )
- && ( name == rhs.name );
+ && ( flags == rhs.flags )
+ && ( imageType == rhs.imageType )
+ && ( format == rhs.format )
+ && ( extent == rhs.extent )
+ && ( mipLevels == rhs.mipLevels )
+ && ( arrayLayers == rhs.arrayLayers )
+ && ( samples == rhs.samples )
+ && ( tiling == rhs.tiling )
+ && ( usage == rhs.usage )
+ && ( sharingMode == rhs.sharingMode )
+ && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
+ && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices )
+ && ( initialLayout == rhs.initialLayout );
}
- bool operator!=( ExportFenceWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( ImageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportFenceWin32HandleInfoKHR;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCreateInfo;
const void* pNext = {};
- const SECURITY_ATTRIBUTES* pAttributes = {};
- DWORD dwAccess = {};
- LPCWSTR name = {};
+ VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {};
+ VULKAN_HPP_NAMESPACE::ImageType imageType = VULKAN_HPP_NAMESPACE::ImageType::e1D;
+ VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ VULKAN_HPP_NAMESPACE::Extent3D extent = {};
+ uint32_t mipLevels = {};
+ uint32_t arrayLayers = {};
+ VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
+ VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal;
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
+ VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
+ uint32_t queueFamilyIndexCount = {};
+ const uint32_t* pQueueFamilyIndices = {};
+ VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+
};
- static_assert( sizeof( ExportFenceWin32HandleInfoKHR ) == sizeof( VkExportFenceWin32HandleInfoKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ExportFenceWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ static_assert( sizeof( ImageCreateInfo ) == sizeof( VkImageCreateInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ImageCreateInfo>::value, "struct wrapper is not a standard layout!" );
- struct ExportMemoryAllocateInfo
+ template <>
+ struct CppType<StructureType, StructureType::eImageCreateInfo>
{
- VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
- : handleTypes( handleTypes_ )
+ using Type = ImageCreateInfo;
+ };
+
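ImageCreateInfo follows the same pattern: structureType/allowDuplicate statics, a CppType mapping, and chainable setters. A short illustrative sketch with placeholder values (not part of the generated header):

    vk::ImageCreateInfo imageInfo;
    imageInfo.setImageType( vk::ImageType::e2D )
             .setFormat( vk::Format::eR8G8B8A8Unorm )
             .setExtent( vk::Extent3D( 256, 256, 1 ) )
             .setMipLevels( 1 )
             .setArrayLayers( 1 )
             .setSamples( vk::SampleCountFlagBits::e1 )
             .setTiling( vk::ImageTiling::eOptimal )
             .setUsage( vk::ImageUsageFlagBits::eSampled | vk::ImageUsageFlagBits::eTransferDst )
             .setSharingMode( vk::SharingMode::eExclusive )
             .setInitialLayout( vk::ImageLayout::eUndefined );
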
+ struct ImageViewCreateInfo
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ImageViewCreateInfo(VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::ImageViewType viewType_ = VULKAN_HPP_NAMESPACE::ImageViewType::e1D, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), image( image_ ), viewType( viewType_ ), format( format_ ), components( components_ ), subresourceRange( subresourceRange_ )
{}
- VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo & operator=( VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ImageViewCreateInfo( ImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImageViewCreateInfo( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo ) - offsetof( ExportMemoryAllocateInfo, pNext ) );
- return *this;
+ *this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ExportMemoryAllocateInfo( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ ImageViewCreateInfo & operator=( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const *>( &rhs );
+ return *this;
}
- ExportMemoryAllocateInfo& operator=( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ ImageViewCreateInfo & operator=( ImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageViewCreateInfo ) );
return *this;
}
- ExportMemoryAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ ImageViewCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- ExportMemoryAllocateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
+ ImageViewCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
- handleTypes = handleTypes_;
+ flags = flags_;
return *this;
}
- operator VkExportMemoryAllocateInfo const&() const VULKAN_HPP_NOEXCEPT
+ ImageViewCreateInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkExportMemoryAllocateInfo*>( this );
+ image = image_;
+ return *this;
}
- operator VkExportMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT
+ ImageViewCreateInfo & setViewType( VULKAN_HPP_NAMESPACE::ImageViewType viewType_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkExportMemoryAllocateInfo*>( this );
+ viewType = viewType_;
+ return *this;
}
- bool operator==( ExportMemoryAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ ImageViewCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+ {
+ format = format_;
+ return *this;
+ }
+
+ ImageViewCreateInfo & setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT
+ {
+ components = components_;
+ return *this;
+ }
+
+ ImageViewCreateInfo & setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT
+ {
+ subresourceRange = subresourceRange_;
+ return *this;
+ }
+
+
+ operator VkImageViewCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkImageViewCreateInfo*>( this );
+ }
+
+ operator VkImageViewCreateInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkImageViewCreateInfo*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ImageViewCreateInfo const& ) const = default;
+#else
+ bool operator==( ImageViewCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( handleTypes == rhs.handleTypes );
+ && ( flags == rhs.flags )
+ && ( image == rhs.image )
+ && ( viewType == rhs.viewType )
+ && ( format == rhs.format )
+ && ( components == rhs.components )
+ && ( subresourceRange == rhs.subresourceRange );
}
- bool operator!=( ExportMemoryAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( ImageViewCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryAllocateInfo;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewCreateInfo;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {};
+ VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags = {};
+ VULKAN_HPP_NAMESPACE::Image image = {};
+ VULKAN_HPP_NAMESPACE::ImageViewType viewType = VULKAN_HPP_NAMESPACE::ImageViewType::e1D;
+ VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ VULKAN_HPP_NAMESPACE::ComponentMapping components = {};
+ VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {};
+
};
- static_assert( sizeof( ExportMemoryAllocateInfo ) == sizeof( VkExportMemoryAllocateInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ExportMemoryAllocateInfo>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( ImageViewCreateInfo ) == sizeof( VkImageViewCreateInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ImageViewCreateInfo>::value, "struct wrapper is not a standard layout!" );
- struct ExportMemoryAllocateInfoNV
+ template <>
+ struct CppType<StructureType, StructureType::eImageViewCreateInfo>
{
- VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfoNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
- : handleTypes( handleTypes_ )
+ using Type = ImageViewCreateInfo;
+ };
+
+ struct IndirectCommandsLayoutTokenNV
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectCommandsLayoutTokenNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutTokenNV(VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_ = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV::eShaderGroup, uint32_t stream_ = {}, uint32_t offset_ = {}, uint32_t vertexBindingUnit_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_ = {}, uint32_t pushconstantOffset_ = {}, uint32_t pushconstantSize_ = {}, VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_ = {}, uint32_t indexTypeCount_ = {}, const VULKAN_HPP_NAMESPACE::IndexType* pIndexTypes_ = {}, const uint32_t* pIndexTypeValues_ = {}) VULKAN_HPP_NOEXCEPT
+ : tokenType( tokenType_ ), stream( stream_ ), offset( offset_ ), vertexBindingUnit( vertexBindingUnit_ ), vertexDynamicStride( vertexDynamicStride_ ), pushconstantPipelineLayout( pushconstantPipelineLayout_ ), pushconstantShaderStageFlags( pushconstantShaderStageFlags_ ), pushconstantOffset( pushconstantOffset_ ), pushconstantSize( pushconstantSize_ ), indirectStateFlags( indirectStateFlags_ ), indexTypeCount( indexTypeCount_ ), pIndexTypes( pIndexTypes_ ), pIndexTypeValues( pIndexTypeValues_ )
{}
- VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV & operator=( VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutTokenNV( IndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ IndirectCommandsLayoutTokenNV( VkIndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV ) - offsetof( ExportMemoryAllocateInfoNV, pNext ) );
- return *this;
+ *this = rhs;
}
- ExportMemoryAllocateInfoNV( VkExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ IndirectCommandsLayoutTokenNV( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_, uint32_t stream_, uint32_t offset_, uint32_t vertexBindingUnit_, VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_, VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_, VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_, uint32_t pushconstantOffset_, uint32_t pushconstantSize_, VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndexType> const & indexTypes_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & indexTypeValues_ = {} )
+ : tokenType( tokenType_ ), stream( stream_ ), offset( offset_ ), vertexBindingUnit( vertexBindingUnit_ ), vertexDynamicStride( vertexDynamicStride_ ), pushconstantPipelineLayout( pushconstantPipelineLayout_ ), pushconstantShaderStageFlags( pushconstantShaderStageFlags_ ), pushconstantOffset( pushconstantOffset_ ), pushconstantSize( pushconstantSize_ ), indirectStateFlags( indirectStateFlags_ ), indexTypeCount( static_cast<uint32_t>( indexTypes_.size() ) ), pIndexTypes( indexTypes_.data() ), pIndexTypeValues( indexTypeValues_.data() )
{
- *this = rhs;
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+ VULKAN_HPP_ASSERT( indexTypes_.size() == indexTypeValues_.size() );
+#else
+ if ( indexTypes_.size() != indexTypeValues_.size() )
+ {
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::IndirectCommandsLayoutTokenNV::IndirectCommandsLayoutTokenNV: indexTypes_.size() != indexTypeValues_.size()" );
+ }
+#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ExportMemoryAllocateInfoNV& operator=( VkExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ IndirectCommandsLayoutTokenNV & operator=( VkIndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV const *>( &rhs );
return *this;
}
- ExportMemoryAllocateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ IndirectCommandsLayoutTokenNV & operator=( IndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( IndirectCommandsLayoutTokenNV ) );
+ return *this;
+ }
+
+ IndirectCommandsLayoutTokenNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- ExportMemoryAllocateInfoNV & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT
+ IndirectCommandsLayoutTokenNV & setTokenType( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_ ) VULKAN_HPP_NOEXCEPT
{
- handleTypes = handleTypes_;
+ tokenType = tokenType_;
return *this;
}
- operator VkExportMemoryAllocateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+ IndirectCommandsLayoutTokenNV & setStream( uint32_t stream_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkExportMemoryAllocateInfoNV*>( this );
+ stream = stream_;
+ return *this;
}
- operator VkExportMemoryAllocateInfoNV &() VULKAN_HPP_NOEXCEPT
+ IndirectCommandsLayoutTokenNV & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkExportMemoryAllocateInfoNV*>( this );
+ offset = offset_;
+ return *this;
}
- bool operator==( ExportMemoryAllocateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ IndirectCommandsLayoutTokenNV & setVertexBindingUnit( uint32_t vertexBindingUnit_ ) VULKAN_HPP_NOEXCEPT
+ {
+ vertexBindingUnit = vertexBindingUnit_;
+ return *this;
+ }
+
+ IndirectCommandsLayoutTokenNV & setVertexDynamicStride( VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_ ) VULKAN_HPP_NOEXCEPT
+ {
+ vertexDynamicStride = vertexDynamicStride_;
+ return *this;
+ }
+
+ IndirectCommandsLayoutTokenNV & setPushconstantPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pushconstantPipelineLayout = pushconstantPipelineLayout_;
+ return *this;
+ }
+
+ IndirectCommandsLayoutTokenNV & setPushconstantShaderStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pushconstantShaderStageFlags = pushconstantShaderStageFlags_;
+ return *this;
+ }
+
+ IndirectCommandsLayoutTokenNV & setPushconstantOffset( uint32_t pushconstantOffset_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pushconstantOffset = pushconstantOffset_;
+ return *this;
+ }
+
+ IndirectCommandsLayoutTokenNV & setPushconstantSize( uint32_t pushconstantSize_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pushconstantSize = pushconstantSize_;
+ return *this;
+ }
+
+ IndirectCommandsLayoutTokenNV & setIndirectStateFlags( VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_ ) VULKAN_HPP_NOEXCEPT
+ {
+ indirectStateFlags = indirectStateFlags_;
+ return *this;
+ }
+
+ IndirectCommandsLayoutTokenNV & setIndexTypeCount( uint32_t indexTypeCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ indexTypeCount = indexTypeCount_;
+ return *this;
+ }
+
+ IndirectCommandsLayoutTokenNV & setPIndexTypes( const VULKAN_HPP_NAMESPACE::IndexType* pIndexTypes_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pIndexTypes = pIndexTypes_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ IndirectCommandsLayoutTokenNV & setIndexTypes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndexType> const & indexTypes_ ) VULKAN_HPP_NOEXCEPT
+ {
+ indexTypeCount = static_cast<uint32_t>( indexTypes_.size() );
+ pIndexTypes = indexTypes_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+ IndirectCommandsLayoutTokenNV & setPIndexTypeValues( const uint32_t* pIndexTypeValues_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pIndexTypeValues = pIndexTypeValues_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ IndirectCommandsLayoutTokenNV & setIndexTypeValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & indexTypeValues_ ) VULKAN_HPP_NOEXCEPT
+ {
+ indexTypeCount = static_cast<uint32_t>( indexTypeValues_.size() );
+ pIndexTypeValues = indexTypeValues_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+ operator VkIndirectCommandsLayoutTokenNV const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkIndirectCommandsLayoutTokenNV*>( this );
+ }
+
+ operator VkIndirectCommandsLayoutTokenNV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkIndirectCommandsLayoutTokenNV*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( IndirectCommandsLayoutTokenNV const& ) const = default;
+#else
+ bool operator==( IndirectCommandsLayoutTokenNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( handleTypes == rhs.handleTypes );
+ && ( tokenType == rhs.tokenType )
+ && ( stream == rhs.stream )
+ && ( offset == rhs.offset )
+ && ( vertexBindingUnit == rhs.vertexBindingUnit )
+ && ( vertexDynamicStride == rhs.vertexDynamicStride )
+ && ( pushconstantPipelineLayout == rhs.pushconstantPipelineLayout )
+ && ( pushconstantShaderStageFlags == rhs.pushconstantShaderStageFlags )
+ && ( pushconstantOffset == rhs.pushconstantOffset )
+ && ( pushconstantSize == rhs.pushconstantSize )
+ && ( indirectStateFlags == rhs.indirectStateFlags )
+ && ( indexTypeCount == rhs.indexTypeCount )
+ && ( pIndexTypes == rhs.pIndexTypes )
+ && ( pIndexTypeValues == rhs.pIndexTypeValues );
}
- bool operator!=( ExportMemoryAllocateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( IndirectCommandsLayoutTokenNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryAllocateInfoNV;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutTokenNV;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes = {};
+ VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV::eShaderGroup;
+ uint32_t stream = {};
+ uint32_t offset = {};
+ uint32_t vertexBindingUnit = {};
+ VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride = {};
+ VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout = {};
+ VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags = {};
+ uint32_t pushconstantOffset = {};
+ uint32_t pushconstantSize = {};
+ VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags = {};
+ uint32_t indexTypeCount = {};
+ const VULKAN_HPP_NAMESPACE::IndexType* pIndexTypes = {};
+ const uint32_t* pIndexTypeValues = {};
+
};
- static_assert( sizeof( ExportMemoryAllocateInfoNV ) == sizeof( VkExportMemoryAllocateInfoNV ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ExportMemoryAllocateInfoNV>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( IndirectCommandsLayoutTokenNV ) == sizeof( VkIndirectCommandsLayoutTokenNV ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<IndirectCommandsLayoutTokenNV>::value, "struct wrapper is not a standard layout!" );
-#ifdef VK_USE_PLATFORM_WIN32_KHR
+ template <>
+ struct CppType<StructureType, StructureType::eIndirectCommandsLayoutTokenNV>
+ {
+ using Type = IndirectCommandsLayoutTokenNV;
+ };
- struct ExportMemoryWin32HandleInfoKHR
+ struct IndirectCommandsLayoutCreateInfoNV
{
- VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoKHR( const SECURITY_ATTRIBUTES* pAttributes_ = {},
- DWORD dwAccess_ = {},
- LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT
- : pAttributes( pAttributes_ )
- , dwAccess( dwAccess_ )
- , name( name_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectCommandsLayoutCreateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNV(VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_ = {}, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, uint32_t tokenCount_ = {}, const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV* pTokens_ = {}, uint32_t streamCount_ = {}, const uint32_t* pStreamStrides_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), pipelineBindPoint( pipelineBindPoint_ ), tokenCount( tokenCount_ ), pTokens( pTokens_ ), streamCount( streamCount_ ), pStreamStrides( pStreamStrides_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNV( IndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ IndirectCommandsLayoutCreateInfoNV( VkIndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ IndirectCommandsLayoutCreateInfoNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV> const & tokens_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & streamStrides_ = {} )
+ : flags( flags_ ), pipelineBindPoint( pipelineBindPoint_ ), tokenCount( static_cast<uint32_t>( tokens_.size() ) ), pTokens( tokens_.data() ), streamCount( static_cast<uint32_t>( streamStrides_.size() ) ), pStreamStrides( streamStrides_.data() )
{}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR & operator=( VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ IndirectCommandsLayoutCreateInfoNV & operator=( VkIndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR ) - offsetof( ExportMemoryWin32HandleInfoKHR, pNext ) );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV const *>( &rhs );
return *this;
}
- ExportMemoryWin32HandleInfoKHR( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ IndirectCommandsLayoutCreateInfoNV & operator=( IndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( IndirectCommandsLayoutCreateInfoNV ) );
+ return *this;
+ }
+
+ IndirectCommandsLayoutCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ IndirectCommandsLayoutCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ IndirectCommandsLayoutCreateInfoNV & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pipelineBindPoint = pipelineBindPoint_;
+ return *this;
+ }
+
+ IndirectCommandsLayoutCreateInfoNV & setTokenCount( uint32_t tokenCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ tokenCount = tokenCount_;
+ return *this;
+ }
+
+ IndirectCommandsLayoutCreateInfoNV & setPTokens( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV* pTokens_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pTokens = pTokens_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ IndirectCommandsLayoutCreateInfoNV & setTokens( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV> const & tokens_ ) VULKAN_HPP_NOEXCEPT
+ {
+ tokenCount = static_cast<uint32_t>( tokens_.size() );
+ pTokens = tokens_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+ IndirectCommandsLayoutCreateInfoNV & setStreamCount( uint32_t streamCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ streamCount = streamCount_;
+ return *this;
+ }
+
+ IndirectCommandsLayoutCreateInfoNV & setPStreamStrides( const uint32_t* pStreamStrides_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pStreamStrides = pStreamStrides_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ IndirectCommandsLayoutCreateInfoNV & setStreamStrides( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & streamStrides_ ) VULKAN_HPP_NOEXCEPT
+ {
+ streamCount = static_cast<uint32_t>( streamStrides_.size() );
+ pStreamStrides = streamStrides_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+ operator VkIndirectCommandsLayoutCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV*>( this );
+ }
+
+ operator VkIndirectCommandsLayoutCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkIndirectCommandsLayoutCreateInfoNV*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( IndirectCommandsLayoutCreateInfoNV const& ) const = default;
+#else
+ bool operator==( IndirectCommandsLayoutCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( pipelineBindPoint == rhs.pipelineBindPoint )
+ && ( tokenCount == rhs.tokenCount )
+ && ( pTokens == rhs.pTokens )
+ && ( streamCount == rhs.streamCount )
+ && ( pStreamStrides == rhs.pStreamStrides );
+ }
+
+ bool operator!=( IndirectCommandsLayoutCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutCreateInfoNV;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags = {};
+ VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
+ uint32_t tokenCount = {};
+ const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV* pTokens = {};
+ uint32_t streamCount = {};
+ const uint32_t* pStreamStrides = {};
+
+ };
+ static_assert( sizeof( IndirectCommandsLayoutCreateInfoNV ) == sizeof( VkIndirectCommandsLayoutCreateInfoNV ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<IndirectCommandsLayoutCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eIndirectCommandsLayoutCreateInfoNV>
+ {
+ using Type = IndirectCommandsLayoutCreateInfoNV;
+ };
+
+ struct PipelineCacheCreateInfo
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCacheCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PipelineCacheCreateInfo(VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_ = {}, size_t initialDataSize_ = {}, const void* pInitialData_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), initialDataSize( initialDataSize_ ), pInitialData( pInitialData_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR PipelineCacheCreateInfo( PipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PipelineCacheCreateInfo( VkPipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- ExportMemoryWin32HandleInfoKHR& operator=( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ template <typename T>
+ PipelineCacheCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & initialData_ )
+ : flags( flags_ ), initialDataSize( initialData_.size() * sizeof(T) ), pInitialData( initialData_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PipelineCacheCreateInfo & operator=( VkPipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const *>( &rhs );
return *this;
}
- ExportMemoryWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ PipelineCacheCreateInfo & operator=( PipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineCacheCreateInfo ) );
+ return *this;
+ }
+
+ PipelineCacheCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- ExportMemoryWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) VULKAN_HPP_NOEXCEPT
+ PipelineCacheCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
- pAttributes = pAttributes_;
+ flags = flags_;
return *this;
}
- ExportMemoryWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT
+ PipelineCacheCreateInfo & setInitialDataSize( size_t initialDataSize_ ) VULKAN_HPP_NOEXCEPT
{
- dwAccess = dwAccess_;
+ initialDataSize = initialDataSize_;
return *this;
}
- ExportMemoryWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
+ PipelineCacheCreateInfo & setPInitialData( const void* pInitialData_ ) VULKAN_HPP_NOEXCEPT
{
- name = name_;
+ pInitialData = pInitialData_;
return *this;
}
- operator VkExportMemoryWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ template <typename T>
+ PipelineCacheCreateInfo & setInitialData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & initialData_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkExportMemoryWin32HandleInfoKHR*>( this );
+ initialDataSize = initialData_.size() * sizeof(T);
+ pInitialData = initialData_.data();
+ return *this;
}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
- operator VkExportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+
+ operator VkPipelineCacheCreateInfo const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkExportMemoryWin32HandleInfoKHR*>( this );
+ return *reinterpret_cast<const VkPipelineCacheCreateInfo*>( this );
}
- bool operator==( ExportMemoryWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ operator VkPipelineCacheCreateInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPipelineCacheCreateInfo*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PipelineCacheCreateInfo const& ) const = default;
+#else
+ bool operator==( PipelineCacheCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( pAttributes == rhs.pAttributes )
- && ( dwAccess == rhs.dwAccess )
- && ( name == rhs.name );
+ && ( flags == rhs.flags )
+ && ( initialDataSize == rhs.initialDataSize )
+ && ( pInitialData == rhs.pInitialData );
}
- bool operator!=( ExportMemoryWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PipelineCacheCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryWin32HandleInfoKHR;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCacheCreateInfo;
const void* pNext = {};
- const SECURITY_ATTRIBUTES* pAttributes = {};
- DWORD dwAccess = {};
- LPCWSTR name = {};
+ VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags = {};
+ size_t initialDataSize = {};
+ const void* pInitialData = {};
+
};
- static_assert( sizeof( ExportMemoryWin32HandleInfoKHR ) == sizeof( VkExportMemoryWin32HandleInfoKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ExportMemoryWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ static_assert( sizeof( PipelineCacheCreateInfo ) == sizeof( VkPipelineCacheCreateInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PipelineCacheCreateInfo>::value, "struct wrapper is not a standard layout!" );
-#ifdef VK_USE_PLATFORM_WIN32_KHR
+ template <>
+ struct CppType<StructureType, StructureType::ePipelineCacheCreateInfo>
+ {
+ using Type = PipelineCacheCreateInfo;
+ };
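
The templated initialData constructor and setter on PipelineCacheCreateInfo above derive initialDataSize as element count times sizeof(T). An illustrative sketch only; the cache blob and device handle are assumed to come from the application, and the explicit <uint8_t> template argument is used because T is not deduced through the ArrayProxyNoTemporaries conversion:

    std::vector<uint8_t> cacheBlob;                      // e.g. filled from a previously saved pipeline cache
    vk::PipelineCacheCreateInfo cacheInfo;
    cacheInfo.setInitialData<uint8_t>( cacheBlob );      // initialDataSize = cacheBlob.size() * sizeof(uint8_t), pInitialData = cacheBlob.data()
    vk::PipelineCache cache = device.createPipelineCache( cacheInfo );
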
- struct ExportMemoryWin32HandleInfoNV
+ struct PushConstantRange
{
- VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoNV( const SECURITY_ATTRIBUTES* pAttributes_ = {},
- DWORD dwAccess_ = {} ) VULKAN_HPP_NOEXCEPT
- : pAttributes( pAttributes_ )
- , dwAccess( dwAccess_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PushConstantRange(VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, uint32_t offset_ = {}, uint32_t size_ = {}) VULKAN_HPP_NOEXCEPT
+ : stageFlags( stageFlags_ ), offset( offset_ ), size( size_ )
{}
- VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV & operator=( VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR PushConstantRange( PushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PushConstantRange( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PushConstantRange & operator=( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV ) - offsetof( ExportMemoryWin32HandleInfoNV, pNext ) );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PushConstantRange const *>( &rhs );
return *this;
}
- ExportMemoryWin32HandleInfoNV( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PushConstantRange & operator=( PushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PushConstantRange ) );
+ return *this;
+ }
+
+ PushConstantRange & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
+ {
+ stageFlags = stageFlags_;
+ return *this;
+ }
+
+ PushConstantRange & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
+ {
+ offset = offset_;
+ return *this;
+ }
+
+ PushConstantRange & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT
+ {
+ size = size_;
+ return *this;
+ }
+
+
+ operator VkPushConstantRange const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPushConstantRange*>( this );
+ }
+
+ operator VkPushConstantRange &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPushConstantRange*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PushConstantRange const& ) const = default;
+#else
+ bool operator==( PushConstantRange const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( stageFlags == rhs.stageFlags )
+ && ( offset == rhs.offset )
+ && ( size == rhs.size );
+ }
+
+ bool operator!=( PushConstantRange const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {};
+ uint32_t offset = {};
+ uint32_t size = {};
+
+ };
+ static_assert( sizeof( PushConstantRange ) == sizeof( VkPushConstantRange ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PushConstantRange>::value, "struct wrapper is not a standard layout!" );
+
+ struct PipelineLayoutCreateInfo
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineLayoutCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PipelineLayoutCreateInfo(VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_ = {}, uint32_t setLayoutCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts_ = {}, uint32_t pushConstantRangeCount_ = {}, const VULKAN_HPP_NAMESPACE::PushConstantRange* pPushConstantRanges_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), setLayoutCount( setLayoutCount_ ), pSetLayouts( pSetLayouts_ ), pushConstantRangeCount( pushConstantRangeCount_ ), pPushConstantRanges( pPushConstantRanges_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR PipelineLayoutCreateInfo( PipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PipelineLayoutCreateInfo( VkPipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- ExportMemoryWin32HandleInfoNV& operator=( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ PipelineLayoutCreateInfo( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PushConstantRange> const & pushConstantRanges_ = {} )
+ : flags( flags_ ), setLayoutCount( static_cast<uint32_t>( setLayouts_.size() ) ), pSetLayouts( setLayouts_.data() ), pushConstantRangeCount( static_cast<uint32_t>( pushConstantRanges_.size() ) ), pPushConstantRanges( pushConstantRanges_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PipelineLayoutCreateInfo & operator=( VkPipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const *>( &rhs );
return *this;
}
- ExportMemoryWin32HandleInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ PipelineLayoutCreateInfo & operator=( PipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineLayoutCreateInfo ) );
+ return *this;
+ }
+
+ PipelineLayoutCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- ExportMemoryWin32HandleInfoNV & setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) VULKAN_HPP_NOEXCEPT
+ PipelineLayoutCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
- pAttributes = pAttributes_;
+ flags = flags_;
return *this;
}
- ExportMemoryWin32HandleInfoNV & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT
+ PipelineLayoutCreateInfo & setSetLayoutCount( uint32_t setLayoutCount_ ) VULKAN_HPP_NOEXCEPT
{
- dwAccess = dwAccess_;
+ setLayoutCount = setLayoutCount_;
return *this;
}
- operator VkExportMemoryWin32HandleInfoNV const&() const VULKAN_HPP_NOEXCEPT
+ PipelineLayoutCreateInfo & setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkExportMemoryWin32HandleInfoNV*>( this );
+ pSetLayouts = pSetLayouts_;
+ return *this;
}
- operator VkExportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ PipelineLayoutCreateInfo & setSetLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkExportMemoryWin32HandleInfoNV*>( this );
+ setLayoutCount = static_cast<uint32_t>( setLayouts_.size() );
+ pSetLayouts = setLayouts_.data();
+ return *this;
}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
- bool operator==( ExportMemoryWin32HandleInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ PipelineLayoutCreateInfo & setPushConstantRangeCount( uint32_t pushConstantRangeCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pushConstantRangeCount = pushConstantRangeCount_;
+ return *this;
+ }
+
+ PipelineLayoutCreateInfo & setPPushConstantRanges( const VULKAN_HPP_NAMESPACE::PushConstantRange* pPushConstantRanges_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pPushConstantRanges = pPushConstantRanges_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ PipelineLayoutCreateInfo & setPushConstantRanges( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PushConstantRange> const & pushConstantRanges_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pushConstantRangeCount = static_cast<uint32_t>( pushConstantRanges_.size() );
+ pPushConstantRanges = pushConstantRanges_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+ operator VkPipelineLayoutCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPipelineLayoutCreateInfo*>( this );
+ }
+
+ operator VkPipelineLayoutCreateInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPipelineLayoutCreateInfo*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PipelineLayoutCreateInfo const& ) const = default;
+#else
+ bool operator==( PipelineLayoutCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( pAttributes == rhs.pAttributes )
- && ( dwAccess == rhs.dwAccess );
+ && ( flags == rhs.flags )
+ && ( setLayoutCount == rhs.setLayoutCount )
+ && ( pSetLayouts == rhs.pSetLayouts )
+ && ( pushConstantRangeCount == rhs.pushConstantRangeCount )
+ && ( pPushConstantRanges == rhs.pPushConstantRanges );
}
- bool operator!=( ExportMemoryWin32HandleInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PipelineLayoutCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryWin32HandleInfoNV;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineLayoutCreateInfo;
const void* pNext = {};
- const SECURITY_ATTRIBUTES* pAttributes = {};
- DWORD dwAccess = {};
+ VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags = {};
+ uint32_t setLayoutCount = {};
+ const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts = {};
+ uint32_t pushConstantRangeCount = {};
+ const VULKAN_HPP_NAMESPACE::PushConstantRange* pPushConstantRanges = {};
+
};
- static_assert( sizeof( ExportMemoryWin32HandleInfoNV ) == sizeof( VkExportMemoryWin32HandleInfoNV ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ExportMemoryWin32HandleInfoNV>::value, "struct wrapper is not a standard layout!" );
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ static_assert( sizeof( PipelineLayoutCreateInfo ) == sizeof( VkPipelineLayoutCreateInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PipelineLayoutCreateInfo>::value, "struct wrapper is not a standard layout!" );
- struct ExportSemaphoreCreateInfo
+ template <>
+ struct CppType<StructureType, StructureType::ePipelineLayoutCreateInfo>
{
- VULKAN_HPP_CONSTEXPR ExportSemaphoreCreateInfo( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
- : handleTypes( handleTypes_ )
+ using Type = PipelineLayoutCreateInfo;
+ };
+
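// --- Illustrative usage sketch (editorial note, not part of the generated header or this diff) ---
// A minimal example of the new ArrayProxyNoTemporaries constructor added above, assuming the
// default vk namespace, enhanced mode enabled (VULKAN_HPP_DISABLE_ENHANCED_MODE not defined),
// exceptions enabled, and a valid vk::Device `device` and vk::DescriptorSetLayout `setLayout`.
// The helper name is hypothetical.
//
//   static vk::PipelineLayout makeExamplePipelineLayout( vk::Device device, vk::DescriptorSetLayout setLayout )
//   {
//     std::array<vk::DescriptorSetLayout, 1> layouts = { setLayout };
//     vk::PushConstantRange pushRange( vk::ShaderStageFlagBits::eVertex, 0, 16 );
//     // setLayoutCount / pushConstantRangeCount are derived from the proxies, so they
//     // no longer need to be filled in by hand
//     vk::PipelineLayoutCreateInfo layoutInfo( {}, layouts, pushRange );
//     return device.createPipelineLayout( layoutInfo );
//   }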
+ struct PrivateDataSlotCreateInfoEXT
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePrivateDataSlotCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PrivateDataSlotCreateInfoEXT(VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlagsEXT flags_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
{}
- VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo & operator=( VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR PrivateDataSlotCreateInfoEXT( PrivateDataSlotCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PrivateDataSlotCreateInfoEXT( VkPrivateDataSlotCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo ) - offsetof( ExportSemaphoreCreateInfo, pNext ) );
- return *this;
+ *this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ExportSemaphoreCreateInfo( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ PrivateDataSlotCreateInfoEXT & operator=( VkPrivateDataSlotCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfoEXT const *>( &rhs );
+ return *this;
}
- ExportSemaphoreCreateInfo& operator=( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ PrivateDataSlotCreateInfoEXT & operator=( PrivateDataSlotCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PrivateDataSlotCreateInfoEXT ) );
return *this;
}
- ExportSemaphoreCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ PrivateDataSlotCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- ExportSemaphoreCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
+ PrivateDataSlotCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
- handleTypes = handleTypes_;
+ flags = flags_;
return *this;
}
- operator VkExportSemaphoreCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkPrivateDataSlotCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkExportSemaphoreCreateInfo*>( this );
+ return *reinterpret_cast<const VkPrivateDataSlotCreateInfoEXT*>( this );
}
- operator VkExportSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkPrivateDataSlotCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkExportSemaphoreCreateInfo*>( this );
+ return *reinterpret_cast<VkPrivateDataSlotCreateInfoEXT*>( this );
}
- bool operator==( ExportSemaphoreCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PrivateDataSlotCreateInfoEXT const& ) const = default;
+#else
+ bool operator==( PrivateDataSlotCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( handleTypes == rhs.handleTypes );
+ && ( flags == rhs.flags );
}
- bool operator!=( ExportSemaphoreCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PrivateDataSlotCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportSemaphoreCreateInfo;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePrivateDataSlotCreateInfoEXT;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes = {};
+ VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlagsEXT flags = {};
+
};
- static_assert( sizeof( ExportSemaphoreCreateInfo ) == sizeof( VkExportSemaphoreCreateInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ExportSemaphoreCreateInfo>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( PrivateDataSlotCreateInfoEXT ) == sizeof( VkPrivateDataSlotCreateInfoEXT ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PrivateDataSlotCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
-#ifdef VK_USE_PLATFORM_WIN32_KHR
+ template <>
+ struct CppType<StructureType, StructureType::ePrivateDataSlotCreateInfoEXT>
+ {
+ using Type = PrivateDataSlotCreateInfoEXT;
+ };
- struct ExportSemaphoreWin32HandleInfoKHR
+ class PrivateDataSlotEXT
{
- VULKAN_HPP_CONSTEXPR ExportSemaphoreWin32HandleInfoKHR( const SECURITY_ATTRIBUTES* pAttributes_ = {},
- DWORD dwAccess_ = {},
- LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT
- : pAttributes( pAttributes_ )
- , dwAccess( dwAccess_ )
- , name( name_ )
+ public:
+ using CType = VkPrivateDataSlotEXT;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePrivateDataSlotEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+
+ public:
+ VULKAN_HPP_CONSTEXPR PrivateDataSlotEXT() VULKAN_HPP_NOEXCEPT
+ : m_privateDataSlotEXT(VK_NULL_HANDLE)
{}
- VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR & operator=( VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR PrivateDataSlotEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_privateDataSlotEXT(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT PrivateDataSlotEXT( VkPrivateDataSlotEXT privateDataSlotEXT ) VULKAN_HPP_NOEXCEPT
+ : m_privateDataSlotEXT( privateDataSlotEXT )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ PrivateDataSlotEXT & operator=(VkPrivateDataSlotEXT privateDataSlotEXT) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR ) - offsetof( ExportSemaphoreWin32HandleInfoKHR, pNext ) );
+ m_privateDataSlotEXT = privateDataSlotEXT;
return *this;
}
+#endif
- ExportSemaphoreWin32HandleInfoKHR( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ PrivateDataSlotEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ {
+ m_privateDataSlotEXT = VK_NULL_HANDLE;
+ return *this;
+ }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PrivateDataSlotEXT const& ) const = default;
+#else
+ bool operator==( PrivateDataSlotEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_privateDataSlotEXT == rhs.m_privateDataSlotEXT;
+ }
+
+ bool operator!=(PrivateDataSlotEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_privateDataSlotEXT != rhs.m_privateDataSlotEXT;
+ }
+
+ bool operator<(PrivateDataSlotEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_privateDataSlotEXT < rhs.m_privateDataSlotEXT;
+ }
+#endif
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPrivateDataSlotEXT() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_privateDataSlotEXT;
+ }
+
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_privateDataSlotEXT != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_privateDataSlotEXT == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkPrivateDataSlotEXT m_privateDataSlotEXT;
+ };
+ static_assert( sizeof( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT ) == sizeof( VkPrivateDataSlotEXT ), "handle and wrapper have different size!" );
+
+ template <>
+ struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::ePrivateDataSlotEXT>
+ {
+ using type = VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT;
+ };
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::ePrivateDataSlotEXT>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT;
+ };
+
+
+
+ template <>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+ };
+
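// --- Illustrative usage sketch (editorial note, not part of the generated header or this diff) ---
// PrivateDataSlotEXT behaves like the other non-dispatchable handle wrappers: default construction
// yields VK_NULL_HANDLE and the explicit bool conversion / operator! test for it. Slot creation is
// assumed to go through the VK_EXT_private_data entry point device.createPrivateDataSlotEXT, which
// requires the extension to be enabled and its commands to be loaded by the dispatcher; the helper
// name is hypothetical.
//
//   static void examplePrivateDataSlot( vk::Device device )
//   {
//     vk::PrivateDataSlotEXT slot;                    // null handle
//     if ( !slot )                                    // compares against VK_NULL_HANDLE
//     {
//       vk::PrivateDataSlotCreateInfoEXT slotInfo{};  // flags are reserved for future use
//       slot = device.createPrivateDataSlotEXT( slotInfo );
//     }
//   }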
+ struct QueryPoolCreateInfo
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryPoolCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR QueryPoolCreateInfo(VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::QueryType queryType_ = VULKAN_HPP_NAMESPACE::QueryType::eOcclusion, uint32_t queryCount_ = {}, VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), queryType( queryType_ ), queryCount( queryCount_ ), pipelineStatistics( pipelineStatistics_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR QueryPoolCreateInfo( QueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ QueryPoolCreateInfo( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ExportSemaphoreWin32HandleInfoKHR& operator=( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ QueryPoolCreateInfo & operator=( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const *>( &rhs );
return *this;
}
- ExportSemaphoreWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ QueryPoolCreateInfo & operator=( QueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( QueryPoolCreateInfo ) );
+ return *this;
+ }
+
+ QueryPoolCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- ExportSemaphoreWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) VULKAN_HPP_NOEXCEPT
+ QueryPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
- pAttributes = pAttributes_;
+ flags = flags_;
return *this;
}
- ExportSemaphoreWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT
+ QueryPoolCreateInfo & setQueryType( VULKAN_HPP_NAMESPACE::QueryType queryType_ ) VULKAN_HPP_NOEXCEPT
{
- dwAccess = dwAccess_;
+ queryType = queryType_;
return *this;
}
- ExportSemaphoreWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
+ QueryPoolCreateInfo & setQueryCount( uint32_t queryCount_ ) VULKAN_HPP_NOEXCEPT
{
- name = name_;
+ queryCount = queryCount_;
return *this;
}
- operator VkExportSemaphoreWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+ QueryPoolCreateInfo & setPipelineStatistics( VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkExportSemaphoreWin32HandleInfoKHR*>( this );
+ pipelineStatistics = pipelineStatistics_;
+ return *this;
}
- operator VkExportSemaphoreWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+
+ operator VkQueryPoolCreateInfo const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkExportSemaphoreWin32HandleInfoKHR*>( this );
+ return *reinterpret_cast<const VkQueryPoolCreateInfo*>( this );
}
- bool operator==( ExportSemaphoreWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ operator VkQueryPoolCreateInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkQueryPoolCreateInfo*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( QueryPoolCreateInfo const& ) const = default;
+#else
+ bool operator==( QueryPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( pAttributes == rhs.pAttributes )
- && ( dwAccess == rhs.dwAccess )
- && ( name == rhs.name );
+ && ( flags == rhs.flags )
+ && ( queryType == rhs.queryType )
+ && ( queryCount == rhs.queryCount )
+ && ( pipelineStatistics == rhs.pipelineStatistics );
}
- bool operator!=( ExportSemaphoreWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( QueryPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportSemaphoreWin32HandleInfoKHR;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolCreateInfo;
const void* pNext = {};
- const SECURITY_ATTRIBUTES* pAttributes = {};
- DWORD dwAccess = {};
- LPCWSTR name = {};
+ VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags = {};
+ VULKAN_HPP_NAMESPACE::QueryType queryType = VULKAN_HPP_NAMESPACE::QueryType::eOcclusion;
+ uint32_t queryCount = {};
+ VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics = {};
+
};
- static_assert( sizeof( ExportSemaphoreWin32HandleInfoKHR ) == sizeof( VkExportSemaphoreWin32HandleInfoKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ExportSemaphoreWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ static_assert( sizeof( QueryPoolCreateInfo ) == sizeof( VkQueryPoolCreateInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<QueryPoolCreateInfo>::value, "struct wrapper is not a standard layout!" );
- struct ExtensionProperties
+ template <>
+ struct CppType<StructureType, StructureType::eQueryPoolCreateInfo>
+ {
+ using Type = QueryPoolCreateInfo;
+ };
+
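// --- Illustrative usage sketch (editorial note, not part of the generated header or this diff) ---
// Creating a small timestamp query pool with the wrapper above; `device` is assumed valid and the
// helper name is hypothetical. pipelineStatistics is only consulted for
// vk::QueryType::ePipelineStatistics pools, so it can stay defaulted here.
//
//   static vk::QueryPool makeExampleTimestampQueryPool( vk::Device device )
//   {
//     vk::QueryPoolCreateInfo queryPoolInfo( {}, vk::QueryType::eTimestamp, 64 );
//     return device.createQueryPool( queryPoolInfo );
//   }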
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ struct RayTracingShaderGroupCreateInfoKHR
{
- ExtensionProperties( std::array<char,VK_MAX_EXTENSION_NAME_SIZE> const& extensionName_ = {},
- uint32_t specVersion_ = {} ) VULKAN_HPP_NOEXCEPT
- : extensionName{}
- , specVersion( specVersion_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingShaderGroupCreateInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoKHR(VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral, uint32_t generalShader_ = {}, uint32_t closestHitShader_ = {}, uint32_t anyHitShader_ = {}, uint32_t intersectionShader_ = {}, const void* pShaderGroupCaptureReplayHandle_ = {}) VULKAN_HPP_NOEXCEPT
+ : type( type_ ), generalShader( generalShader_ ), closestHitShader( closestHitShader_ ), anyHitShader( anyHitShader_ ), intersectionShader( intersectionShader_ ), pShaderGroupCaptureReplayHandle( pShaderGroupCaptureReplayHandle_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoKHR( RayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ RayTracingShaderGroupCreateInfoKHR( VkRayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_EXTENSION_NAME_SIZE,VK_MAX_EXTENSION_NAME_SIZE>::copy( extensionName, extensionName_ );
+ *this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ExtensionProperties( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ RayTracingShaderGroupCreateInfoKHR & operator=( VkRayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR const *>( &rhs );
+ return *this;
}
- ExtensionProperties& operator=( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ RayTracingShaderGroupCreateInfoKHR & operator=( RayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExtensionProperties const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( RayTracingShaderGroupCreateInfoKHR ) );
return *this;
}
- operator VkExtensionProperties const&() const VULKAN_HPP_NOEXCEPT
+ RayTracingShaderGroupCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkExtensionProperties*>( this );
+ pNext = pNext_;
+ return *this;
}
- operator VkExtensionProperties &() VULKAN_HPP_NOEXCEPT
+ RayTracingShaderGroupCreateInfoKHR & setType( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkExtensionProperties*>( this );
+ type = type_;
+ return *this;
}
- bool operator==( ExtensionProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+ RayTracingShaderGroupCreateInfoKHR & setGeneralShader( uint32_t generalShader_ ) VULKAN_HPP_NOEXCEPT
{
- return ( memcmp( extensionName, rhs.extensionName, VK_MAX_EXTENSION_NAME_SIZE * sizeof( char ) ) == 0 )
- && ( specVersion == rhs.specVersion );
+ generalShader = generalShader_;
+ return *this;
}
- bool operator!=( ExtensionProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+ RayTracingShaderGroupCreateInfoKHR & setClosestHitShader( uint32_t closestHitShader_ ) VULKAN_HPP_NOEXCEPT
+ {
+ closestHitShader = closestHitShader_;
+ return *this;
+ }
+
+ RayTracingShaderGroupCreateInfoKHR & setAnyHitShader( uint32_t anyHitShader_ ) VULKAN_HPP_NOEXCEPT
+ {
+ anyHitShader = anyHitShader_;
+ return *this;
+ }
+
+ RayTracingShaderGroupCreateInfoKHR & setIntersectionShader( uint32_t intersectionShader_ ) VULKAN_HPP_NOEXCEPT
+ {
+ intersectionShader = intersectionShader_;
+ return *this;
+ }
+
+ RayTracingShaderGroupCreateInfoKHR & setPShaderGroupCaptureReplayHandle( const void* pShaderGroupCaptureReplayHandle_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pShaderGroupCaptureReplayHandle = pShaderGroupCaptureReplayHandle_;
+ return *this;
+ }
+
+
+ operator VkRayTracingShaderGroupCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkRayTracingShaderGroupCreateInfoKHR*>( this );
+ }
+
+ operator VkRayTracingShaderGroupCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkRayTracingShaderGroupCreateInfoKHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( RayTracingShaderGroupCreateInfoKHR const& ) const = default;
+#else
+ bool operator==( RayTracingShaderGroupCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( type == rhs.type )
+ && ( generalShader == rhs.generalShader )
+ && ( closestHitShader == rhs.closestHitShader )
+ && ( anyHitShader == rhs.anyHitShader )
+ && ( intersectionShader == rhs.intersectionShader )
+ && ( pShaderGroupCaptureReplayHandle == rhs.pShaderGroupCaptureReplayHandle );
+ }
+
+ bool operator!=( RayTracingShaderGroupCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- char extensionName[VK_MAX_EXTENSION_NAME_SIZE] = {};
- uint32_t specVersion = {};
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingShaderGroupCreateInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral;
+ uint32_t generalShader = {};
+ uint32_t closestHitShader = {};
+ uint32_t anyHitShader = {};
+ uint32_t intersectionShader = {};
+ const void* pShaderGroupCaptureReplayHandle = {};
+
};
- static_assert( sizeof( ExtensionProperties ) == sizeof( VkExtensionProperties ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ExtensionProperties>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( RayTracingShaderGroupCreateInfoKHR ) == sizeof( VkRayTracingShaderGroupCreateInfoKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<RayTracingShaderGroupCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
- struct ExternalMemoryProperties
+ template <>
+ struct CppType<StructureType, StructureType::eRayTracingShaderGroupCreateInfoKHR>
+ {
+ using Type = RayTracingShaderGroupCreateInfoKHR;
+ };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
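// --- Illustrative usage sketch (editorial note, not part of the generated header or this diff) ---
// Describing a triangle hit group with the struct above: only closestHitShader references a stage
// (index 1 into the pipeline's pStages array is an assumed example value); the remaining slots are
// marked unused. VK_SHADER_UNUSED_KHR comes from the beta ray-tracing headers, so this sketch
// assumes VK_ENABLE_BETA_EXTENSIONS is defined.
//
//   static const vk::RayTracingShaderGroupCreateInfoKHR exampleHitGroup(
//       vk::RayTracingShaderGroupTypeKHR::eTrianglesHitGroup,
//       VK_SHADER_UNUSED_KHR, // generalShader
//       1,                    // closestHitShader
//       VK_SHADER_UNUSED_KHR, // anyHitShader
//       VK_SHADER_UNUSED_KHR  // intersectionShader
//   );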
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ struct PipelineLibraryCreateInfoKHR
{
- ExternalMemoryProperties( VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags externalMemoryFeatures_ = {},
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes_ = {},
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
- : externalMemoryFeatures( externalMemoryFeatures_ )
- , exportFromImportedHandleTypes( exportFromImportedHandleTypes_ )
- , compatibleHandleTypes( compatibleHandleTypes_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineLibraryCreateInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PipelineLibraryCreateInfoKHR(uint32_t libraryCount_ = {}, const VULKAN_HPP_NAMESPACE::Pipeline* pLibraries_ = {}) VULKAN_HPP_NOEXCEPT
+ : libraryCount( libraryCount_ ), pLibraries( pLibraries_ )
{}
- ExternalMemoryProperties( VkExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR PipelineLibraryCreateInfoKHR( PipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PipelineLibraryCreateInfoKHR( VkPipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- ExternalMemoryProperties& operator=( VkExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ PipelineLibraryCreateInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Pipeline> const & libraries_ )
+ : libraryCount( static_cast<uint32_t>( libraries_.size() ) ), pLibraries( libraries_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PipelineLibraryCreateInfoKHR & operator=( VkPipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR const *>( &rhs );
+ return *this;
+ }
+
+ PipelineLibraryCreateInfoKHR & operator=( PipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryProperties const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineLibraryCreateInfoKHR ) );
return *this;
}
- operator VkExternalMemoryProperties const&() const VULKAN_HPP_NOEXCEPT
+ PipelineLibraryCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkExternalMemoryProperties*>( this );
+ pNext = pNext_;
+ return *this;
}
- operator VkExternalMemoryProperties &() VULKAN_HPP_NOEXCEPT
+ PipelineLibraryCreateInfoKHR & setLibraryCount( uint32_t libraryCount_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkExternalMemoryProperties*>( this );
+ libraryCount = libraryCount_;
+ return *this;
}
- bool operator==( ExternalMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+ PipelineLibraryCreateInfoKHR & setPLibraries( const VULKAN_HPP_NAMESPACE::Pipeline* pLibraries_ ) VULKAN_HPP_NOEXCEPT
{
- return ( externalMemoryFeatures == rhs.externalMemoryFeatures )
- && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
- && ( compatibleHandleTypes == rhs.compatibleHandleTypes );
+ pLibraries = pLibraries_;
+ return *this;
}
- bool operator!=( ExternalMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ PipelineLibraryCreateInfoKHR & setLibraries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Pipeline> const & libraries_ ) VULKAN_HPP_NOEXCEPT
+ {
+ libraryCount = static_cast<uint32_t>( libraries_.size() );
+ pLibraries = libraries_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+ operator VkPipelineLibraryCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPipelineLibraryCreateInfoKHR*>( this );
+ }
+
+ operator VkPipelineLibraryCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPipelineLibraryCreateInfoKHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PipelineLibraryCreateInfoKHR const& ) const = default;
+#else
+ bool operator==( PipelineLibraryCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( libraryCount == rhs.libraryCount )
+ && ( pLibraries == rhs.pLibraries );
+ }
+
+ bool operator!=( PipelineLibraryCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags externalMemoryFeatures = {};
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes = {};
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes = {};
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineLibraryCreateInfoKHR;
+ const void* pNext = {};
+ uint32_t libraryCount = {};
+ const VULKAN_HPP_NAMESPACE::Pipeline* pLibraries = {};
+
};
- static_assert( sizeof( ExternalMemoryProperties ) == sizeof( VkExternalMemoryProperties ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ExternalMemoryProperties>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( PipelineLibraryCreateInfoKHR ) == sizeof( VkPipelineLibraryCreateInfoKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PipelineLibraryCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
- struct ExternalBufferProperties
+ template <>
+ struct CppType<StructureType, StructureType::ePipelineLibraryCreateInfoKHR>
{
- ExternalBufferProperties( VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {} ) VULKAN_HPP_NOEXCEPT
- : externalMemoryProperties( externalMemoryProperties_ )
+ using Type = PipelineLibraryCreateInfoKHR;
+ };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ struct RayTracingPipelineInterfaceCreateInfoKHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineInterfaceCreateInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR RayTracingPipelineInterfaceCreateInfoKHR(uint32_t maxPayloadSize_ = {}, uint32_t maxAttributeSize_ = {}, uint32_t maxCallableSize_ = {}) VULKAN_HPP_NOEXCEPT
+ : maxPayloadSize( maxPayloadSize_ ), maxAttributeSize( maxAttributeSize_ ), maxCallableSize( maxCallableSize_ )
{}
- VULKAN_HPP_NAMESPACE::ExternalBufferProperties & operator=( VULKAN_HPP_NAMESPACE::ExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR RayTracingPipelineInterfaceCreateInfoKHR( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ RayTracingPipelineInterfaceCreateInfoKHR( VkRayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExternalBufferProperties ) - offsetof( ExternalBufferProperties, pNext ) );
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ RayTracingPipelineInterfaceCreateInfoKHR & operator=( VkRayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR const *>( &rhs );
return *this;
}
- ExternalBufferProperties( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ RayTracingPipelineInterfaceCreateInfoKHR & operator=( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( RayTracingPipelineInterfaceCreateInfoKHR ) );
+ return *this;
}
- ExternalBufferProperties& operator=( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ RayTracingPipelineInterfaceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalBufferProperties const *>(&rhs);
+ pNext = pNext_;
return *this;
}
- operator VkExternalBufferProperties const&() const VULKAN_HPP_NOEXCEPT
+ RayTracingPipelineInterfaceCreateInfoKHR & setMaxPayloadSize( uint32_t maxPayloadSize_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkExternalBufferProperties*>( this );
+ maxPayloadSize = maxPayloadSize_;
+ return *this;
}
- operator VkExternalBufferProperties &() VULKAN_HPP_NOEXCEPT
+ RayTracingPipelineInterfaceCreateInfoKHR & setMaxAttributeSize( uint32_t maxAttributeSize_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkExternalBufferProperties*>( this );
+ maxAttributeSize = maxAttributeSize_;
+ return *this;
}
- bool operator==( ExternalBufferProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+ RayTracingPipelineInterfaceCreateInfoKHR & setMaxCallableSize( uint32_t maxCallableSize_ ) VULKAN_HPP_NOEXCEPT
+ {
+ maxCallableSize = maxCallableSize_;
+ return *this;
+ }
+
+
+ operator VkRayTracingPipelineInterfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkRayTracingPipelineInterfaceCreateInfoKHR*>( this );
+ }
+
+ operator VkRayTracingPipelineInterfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkRayTracingPipelineInterfaceCreateInfoKHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( RayTracingPipelineInterfaceCreateInfoKHR const& ) const = default;
+#else
+ bool operator==( RayTracingPipelineInterfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( externalMemoryProperties == rhs.externalMemoryProperties );
+ && ( maxPayloadSize == rhs.maxPayloadSize )
+ && ( maxAttributeSize == rhs.maxAttributeSize )
+ && ( maxCallableSize == rhs.maxCallableSize );
}
- bool operator!=( ExternalBufferProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( RayTracingPipelineInterfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalBufferProperties;
- void* pNext = {};
- VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties = {};
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineInterfaceCreateInfoKHR;
+ const void* pNext = {};
+ uint32_t maxPayloadSize = {};
+ uint32_t maxAttributeSize = {};
+ uint32_t maxCallableSize = {};
+
};
- static_assert( sizeof( ExternalBufferProperties ) == sizeof( VkExternalBufferProperties ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ExternalBufferProperties>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( RayTracingPipelineInterfaceCreateInfoKHR ) == sizeof( VkRayTracingPipelineInterfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<RayTracingPipelineInterfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
- struct ExternalFenceProperties
+ template <>
+ struct CppType<StructureType, StructureType::eRayTracingPipelineInterfaceCreateInfoKHR>
{
- ExternalFenceProperties( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes_ = {},
- VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes_ = {},
- VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags externalFenceFeatures_ = {} ) VULKAN_HPP_NOEXCEPT
- : exportFromImportedHandleTypes( exportFromImportedHandleTypes_ )
- , compatibleHandleTypes( compatibleHandleTypes_ )
- , externalFenceFeatures( externalFenceFeatures_ )
+ using Type = RayTracingPipelineInterfaceCreateInfoKHR;
+ };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ struct RayTracingPipelineCreateInfoKHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineCreateInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoKHR(VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, uint32_t stageCount_ = {}, const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ = {}, uint32_t groupCount_ = {}, const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR* pGroups_ = {}, uint32_t maxRecursionDepth_ = {}, VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR libraries_ = {}, const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR* pLibraryInterface_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), stageCount( stageCount_ ), pStages( pStages_ ), groupCount( groupCount_ ), pGroups( pGroups_ ), maxRecursionDepth( maxRecursionDepth_ ), libraries( libraries_ ), pLibraryInterface( pLibraryInterface_ ), layout( layout_ ), basePipelineHandle( basePipelineHandle_ ), basePipelineIndex( basePipelineIndex_ )
{}
- VULKAN_HPP_NAMESPACE::ExternalFenceProperties & operator=( VULKAN_HPP_NAMESPACE::ExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoKHR( RayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ RayTracingPipelineCreateInfoKHR( VkRayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExternalFenceProperties ) - offsetof( ExternalFenceProperties, pNext ) );
+ *this = rhs;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ RayTracingPipelineCreateInfoKHR( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR> const & groups_ = {}, uint32_t maxRecursionDepth_ = {}, VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR libraries_ = {}, const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR* pLibraryInterface_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {} )
+ : flags( flags_ ), stageCount( static_cast<uint32_t>( stages_.size() ) ), pStages( stages_.data() ), groupCount( static_cast<uint32_t>( groups_.size() ) ), pGroups( groups_.data() ), maxRecursionDepth( maxRecursionDepth_ ), libraries( libraries_ ), pLibraryInterface( pLibraryInterface_ ), layout( layout_ ), basePipelineHandle( basePipelineHandle_ ), basePipelineIndex( basePipelineIndex_ )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ RayTracingPipelineCreateInfoKHR & operator=( VkRayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR const *>( &rhs );
return *this;
}
- ExternalFenceProperties( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ RayTracingPipelineCreateInfoKHR & operator=( RayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( RayTracingPipelineCreateInfoKHR ) );
+ return *this;
}
- ExternalFenceProperties& operator=( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ RayTracingPipelineCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalFenceProperties const *>(&rhs);
+ pNext = pNext_;
return *this;
}
- operator VkExternalFenceProperties const&() const VULKAN_HPP_NOEXCEPT
+ RayTracingPipelineCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkExternalFenceProperties*>( this );
+ flags = flags_;
+ return *this;
}
- operator VkExternalFenceProperties &() VULKAN_HPP_NOEXCEPT
+ RayTracingPipelineCreateInfoKHR & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkExternalFenceProperties*>( this );
+ stageCount = stageCount_;
+ return *this;
}
- bool operator==( ExternalFenceProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+ RayTracingPipelineCreateInfoKHR & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pStages = pStages_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ RayTracingPipelineCreateInfoKHR & setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_ ) VULKAN_HPP_NOEXCEPT
+ {
+ stageCount = static_cast<uint32_t>( stages_.size() );
+ pStages = stages_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+ RayTracingPipelineCreateInfoKHR & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ groupCount = groupCount_;
+ return *this;
+ }
+
+ RayTracingPipelineCreateInfoKHR & setPGroups( const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR* pGroups_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pGroups = pGroups_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ RayTracingPipelineCreateInfoKHR & setGroups( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR> const & groups_ ) VULKAN_HPP_NOEXCEPT
+ {
+ groupCount = static_cast<uint32_t>( groups_.size() );
+ pGroups = groups_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+ RayTracingPipelineCreateInfoKHR & setMaxRecursionDepth( uint32_t maxRecursionDepth_ ) VULKAN_HPP_NOEXCEPT
+ {
+ maxRecursionDepth = maxRecursionDepth_;
+ return *this;
+ }
+
+ RayTracingPipelineCreateInfoKHR & setLibraries( VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR const & libraries_ ) VULKAN_HPP_NOEXCEPT
+ {
+ libraries = libraries_;
+ return *this;
+ }
+
+ RayTracingPipelineCreateInfoKHR & setPLibraryInterface( const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR* pLibraryInterface_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pLibraryInterface = pLibraryInterface_;
+ return *this;
+ }
+
+ RayTracingPipelineCreateInfoKHR & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
+ {
+ layout = layout_;
+ return *this;
+ }
+
+ RayTracingPipelineCreateInfoKHR & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
+ {
+ basePipelineHandle = basePipelineHandle_;
+ return *this;
+ }
+
+ RayTracingPipelineCreateInfoKHR & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
+ {
+ basePipelineIndex = basePipelineIndex_;
+ return *this;
+ }
+
+
+ operator VkRayTracingPipelineCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR*>( this );
+ }
+
+ operator VkRayTracingPipelineCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkRayTracingPipelineCreateInfoKHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( RayTracingPipelineCreateInfoKHR const& ) const = default;
+#else
+ bool operator==( RayTracingPipelineCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
- && ( compatibleHandleTypes == rhs.compatibleHandleTypes )
- && ( externalFenceFeatures == rhs.externalFenceFeatures );
+ && ( flags == rhs.flags )
+ && ( stageCount == rhs.stageCount )
+ && ( pStages == rhs.pStages )
+ && ( groupCount == rhs.groupCount )
+ && ( pGroups == rhs.pGroups )
+ && ( maxRecursionDepth == rhs.maxRecursionDepth )
+ && ( libraries == rhs.libraries )
+ && ( pLibraryInterface == rhs.pLibraryInterface )
+ && ( layout == rhs.layout )
+ && ( basePipelineHandle == rhs.basePipelineHandle )
+ && ( basePipelineIndex == rhs.basePipelineIndex );
}
- bool operator!=( ExternalFenceProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( RayTracingPipelineCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalFenceProperties;
- void* pNext = {};
- VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes = {};
- VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes = {};
- VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags externalFenceFeatures = {};
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineCreateInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {};
+ uint32_t stageCount = {};
+ const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages = {};
+ uint32_t groupCount = {};
+ const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR* pGroups = {};
+ uint32_t maxRecursionDepth = {};
+ VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR libraries = {};
+ const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR* pLibraryInterface = {};
+ VULKAN_HPP_NAMESPACE::PipelineLayout layout = {};
+ VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {};
+ int32_t basePipelineIndex = {};
+
};
- static_assert( sizeof( ExternalFenceProperties ) == sizeof( VkExternalFenceProperties ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ExternalFenceProperties>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( RayTracingPipelineCreateInfoKHR ) == sizeof( VkRayTracingPipelineCreateInfoKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<RayTracingPipelineCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
+ template <>
+ struct CppType<StructureType, StructureType::eRayTracingPipelineCreateInfoKHR>
+ {
+ using Type = RayTracingPipelineCreateInfoKHR;
+ };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
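// --- Illustrative usage sketch (editorial note, not part of the generated header or this diff) ---
// Filling out the beta ray-tracing pipeline description with the ArrayProxy constructor and the
// fluent setters above; `stages`, `groups`, and `layout` are assumed to exist, the helper name is
// hypothetical, and the finished create info would then be handed to the beta
// createRayTracingPipelinesKHR entry point (not shown, as its signature is extension-specific).
//
//   #ifdef VK_ENABLE_BETA_EXTENSIONS
//   static vk::RayTracingPipelineCreateInfoKHR makeExampleRayTracingPipelineInfo(
//       std::array<vk::PipelineShaderStageCreateInfo, 2> const & stages,
//       std::array<vk::RayTracingShaderGroupCreateInfoKHR, 2> const & groups,
//       vk::PipelineLayout layout )
//   {
//     // stageCount / groupCount come from the proxies; a recursion depth of 1 is an example value
//     vk::RayTracingPipelineCreateInfoKHR createInfo( {}, stages, groups, 1 );
//     createInfo.setLayout( layout );
//     return createInfo;
//   }
//   #endif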
- struct ExternalFormatANDROID
+ struct RayTracingShaderGroupCreateInfoNV
{
- VULKAN_HPP_CONSTEXPR ExternalFormatANDROID( uint64_t externalFormat_ = {} ) VULKAN_HPP_NOEXCEPT
- : externalFormat( externalFormat_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingShaderGroupCreateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoNV(VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral, uint32_t generalShader_ = {}, uint32_t closestHitShader_ = {}, uint32_t anyHitShader_ = {}, uint32_t intersectionShader_ = {}) VULKAN_HPP_NOEXCEPT
+ : type( type_ ), generalShader( generalShader_ ), closestHitShader( closestHitShader_ ), anyHitShader( anyHitShader_ ), intersectionShader( intersectionShader_ )
{}
- VULKAN_HPP_NAMESPACE::ExternalFormatANDROID & operator=( VULKAN_HPP_NAMESPACE::ExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoNV( RayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ RayTracingShaderGroupCreateInfoNV( VkRayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExternalFormatANDROID ) - offsetof( ExternalFormatANDROID, pNext ) );
- return *this;
+ *this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ExternalFormatANDROID( VkExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+ RayTracingShaderGroupCreateInfoNV & operator=( VkRayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV const *>( &rhs );
+ return *this;
}
- ExternalFormatANDROID& operator=( VkExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+ RayTracingShaderGroupCreateInfoNV & operator=( RayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalFormatANDROID const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( RayTracingShaderGroupCreateInfoNV ) );
return *this;
}
- ExternalFormatANDROID & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ RayTracingShaderGroupCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- ExternalFormatANDROID & setExternalFormat( uint64_t externalFormat_ ) VULKAN_HPP_NOEXCEPT
+ RayTracingShaderGroupCreateInfoNV & setType( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ ) VULKAN_HPP_NOEXCEPT
{
- externalFormat = externalFormat_;
+ type = type_;
return *this;
}
- operator VkExternalFormatANDROID const&() const VULKAN_HPP_NOEXCEPT
+ RayTracingShaderGroupCreateInfoNV & setGeneralShader( uint32_t generalShader_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkExternalFormatANDROID*>( this );
+ generalShader = generalShader_;
+ return *this;
}
- operator VkExternalFormatANDROID &() VULKAN_HPP_NOEXCEPT
+ RayTracingShaderGroupCreateInfoNV & setClosestHitShader( uint32_t closestHitShader_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkExternalFormatANDROID*>( this );
+ closestHitShader = closestHitShader_;
+ return *this;
}
- bool operator==( ExternalFormatANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
+ RayTracingShaderGroupCreateInfoNV & setAnyHitShader( uint32_t anyHitShader_ ) VULKAN_HPP_NOEXCEPT
+ {
+ anyHitShader = anyHitShader_;
+ return *this;
+ }
+
+ RayTracingShaderGroupCreateInfoNV & setIntersectionShader( uint32_t intersectionShader_ ) VULKAN_HPP_NOEXCEPT
+ {
+ intersectionShader = intersectionShader_;
+ return *this;
+ }
+
+
+ operator VkRayTracingShaderGroupCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkRayTracingShaderGroupCreateInfoNV*>( this );
+ }
+
+ operator VkRayTracingShaderGroupCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkRayTracingShaderGroupCreateInfoNV*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( RayTracingShaderGroupCreateInfoNV const& ) const = default;
+#else
+ bool operator==( RayTracingShaderGroupCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( externalFormat == rhs.externalFormat );
+ && ( type == rhs.type )
+ && ( generalShader == rhs.generalShader )
+ && ( closestHitShader == rhs.closestHitShader )
+ && ( anyHitShader == rhs.anyHitShader )
+ && ( intersectionShader == rhs.intersectionShader );
}
- bool operator!=( ExternalFormatANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( RayTracingShaderGroupCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalFormatANDROID;
- void* pNext = {};
- uint64_t externalFormat = {};
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingShaderGroupCreateInfoNV;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral;
+ uint32_t generalShader = {};
+ uint32_t closestHitShader = {};
+ uint32_t anyHitShader = {};
+ uint32_t intersectionShader = {};
+
};
- static_assert( sizeof( ExternalFormatANDROID ) == sizeof( VkExternalFormatANDROID ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ExternalFormatANDROID>::value, "struct wrapper is not a standard layout!" );
-#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+ static_assert( sizeof( RayTracingShaderGroupCreateInfoNV ) == sizeof( VkRayTracingShaderGroupCreateInfoNV ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<RayTracingShaderGroupCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
- struct ExternalImageFormatProperties
+ template <>
+ struct CppType<StructureType, StructureType::eRayTracingShaderGroupCreateInfoNV>
{
- ExternalImageFormatProperties( VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {} ) VULKAN_HPP_NOEXCEPT
- : externalMemoryProperties( externalMemoryProperties_ )
+ using Type = RayTracingShaderGroupCreateInfoNV;
+ };
+
+ struct RayTracingPipelineCreateInfoNV
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineCreateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoNV(VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, uint32_t stageCount_ = {}, const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ = {}, uint32_t groupCount_ = {}, const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV* pGroups_ = {}, uint32_t maxRecursionDepth_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), stageCount( stageCount_ ), pStages( pStages_ ), groupCount( groupCount_ ), pGroups( pGroups_ ), maxRecursionDepth( maxRecursionDepth_ ), layout( layout_ ), basePipelineHandle( basePipelineHandle_ ), basePipelineIndex( basePipelineIndex_ )
{}
- VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties & operator=( VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoNV( RayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ RayTracingPipelineCreateInfoNV( VkRayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties ) - offsetof( ExternalImageFormatProperties, pNext ) );
+ *this = rhs;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ RayTracingPipelineCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV> const & groups_ = {}, uint32_t maxRecursionDepth_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {} )
+ : flags( flags_ ), stageCount( static_cast<uint32_t>( stages_.size() ) ), pStages( stages_.data() ), groupCount( static_cast<uint32_t>( groups_.size() ) ), pGroups( groups_.data() ), maxRecursionDepth( maxRecursionDepth_ ), layout( layout_ ), basePipelineHandle( basePipelineHandle_ ), basePipelineIndex( basePipelineIndex_ )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ RayTracingPipelineCreateInfoNV & operator=( VkRayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const *>( &rhs );
return *this;
}
- ExternalImageFormatProperties( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ RayTracingPipelineCreateInfoNV & operator=( RayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( RayTracingPipelineCreateInfoNV ) );
+ return *this;
}
- ExternalImageFormatProperties& operator=( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ RayTracingPipelineCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties const *>(&rhs);
+ pNext = pNext_;
return *this;
}
- operator VkExternalImageFormatProperties const&() const VULKAN_HPP_NOEXCEPT
+ RayTracingPipelineCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkExternalImageFormatProperties*>( this );
+ flags = flags_;
+ return *this;
}
- operator VkExternalImageFormatProperties &() VULKAN_HPP_NOEXCEPT
+ RayTracingPipelineCreateInfoNV & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkExternalImageFormatProperties*>( this );
+ stageCount = stageCount_;
+ return *this;
}
- bool operator==( ExternalImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+ RayTracingPipelineCreateInfoNV & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pStages = pStages_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ RayTracingPipelineCreateInfoNV & setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_ ) VULKAN_HPP_NOEXCEPT
+ {
+ stageCount = static_cast<uint32_t>( stages_.size() );
+ pStages = stages_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+ RayTracingPipelineCreateInfoNV & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ groupCount = groupCount_;
+ return *this;
+ }
+
+ RayTracingPipelineCreateInfoNV & setPGroups( const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV* pGroups_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pGroups = pGroups_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ RayTracingPipelineCreateInfoNV & setGroups( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV> const & groups_ ) VULKAN_HPP_NOEXCEPT
+ {
+ groupCount = static_cast<uint32_t>( groups_.size() );
+ pGroups = groups_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+ RayTracingPipelineCreateInfoNV & setMaxRecursionDepth( uint32_t maxRecursionDepth_ ) VULKAN_HPP_NOEXCEPT
+ {
+ maxRecursionDepth = maxRecursionDepth_;
+ return *this;
+ }
+
+ RayTracingPipelineCreateInfoNV & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
+ {
+ layout = layout_;
+ return *this;
+ }
+
+ RayTracingPipelineCreateInfoNV & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
+ {
+ basePipelineHandle = basePipelineHandle_;
+ return *this;
+ }
+
+ RayTracingPipelineCreateInfoNV & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
+ {
+ basePipelineIndex = basePipelineIndex_;
+ return *this;
+ }
+
+
+ operator VkRayTracingPipelineCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( this );
+ }
+
+ operator VkRayTracingPipelineCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkRayTracingPipelineCreateInfoNV*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( RayTracingPipelineCreateInfoNV const& ) const = default;
+#else
+ bool operator==( RayTracingPipelineCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( externalMemoryProperties == rhs.externalMemoryProperties );
+ && ( flags == rhs.flags )
+ && ( stageCount == rhs.stageCount )
+ && ( pStages == rhs.pStages )
+ && ( groupCount == rhs.groupCount )
+ && ( pGroups == rhs.pGroups )
+ && ( maxRecursionDepth == rhs.maxRecursionDepth )
+ && ( layout == rhs.layout )
+ && ( basePipelineHandle == rhs.basePipelineHandle )
+ && ( basePipelineIndex == rhs.basePipelineIndex );
}
- bool operator!=( ExternalImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( RayTracingPipelineCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalImageFormatProperties;
- void* pNext = {};
- VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties = {};
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineCreateInfoNV;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {};
+ uint32_t stageCount = {};
+ const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages = {};
+ uint32_t groupCount = {};
+ const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV* pGroups = {};
+ uint32_t maxRecursionDepth = {};
+ VULKAN_HPP_NAMESPACE::PipelineLayout layout = {};
+ VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {};
+ int32_t basePipelineIndex = {};
+
};
- static_assert( sizeof( ExternalImageFormatProperties ) == sizeof( VkExternalImageFormatProperties ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ExternalImageFormatProperties>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( RayTracingPipelineCreateInfoNV ) == sizeof( VkRayTracingPipelineCreateInfoNV ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<RayTracingPipelineCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
- struct ImageFormatProperties
+ template <>
+ struct CppType<StructureType, StructureType::eRayTracingPipelineCreateInfoNV>
{
- ImageFormatProperties( VULKAN_HPP_NAMESPACE::Extent3D maxExtent_ = {},
- uint32_t maxMipLevels_ = {},
- uint32_t maxArrayLayers_ = {},
- VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize_ = {} ) VULKAN_HPP_NOEXCEPT
- : maxExtent( maxExtent_ )
- , maxMipLevels( maxMipLevels_ )
- , maxArrayLayers( maxArrayLayers_ )
- , sampleCounts( sampleCounts_ )
- , maxResourceSize( maxResourceSize_ )
+ using Type = RayTracingPipelineCreateInfoNV;
+ };
+
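A minimal usage sketch of the wrapper above, assuming VULKAN_HPP_DISABLE_ENHANCED_MODE and VULKAN_HPP_NO_STRUCT_CONSTRUCTORS are not defined: the array-proxy constructor derives stageCount/pStages and groupCount/pGroups from the containers, and each setter returns the struct by reference so calls can be chained. The function name, layout handle, and stage/group contents below are hypothetical placeholders, not part of the header.

    #include <array>
    #include <vulkan/vulkan.hpp>

    // Sketch: assemble a ray-tracing pipeline description through the wrapper.
    vk::RayTracingPipelineCreateInfoNV makeRayTracingInfo(
        vk::PipelineLayout layout,
        std::array<vk::PipelineShaderStageCreateInfo, 2> const & stages,
        std::array<vk::RayTracingShaderGroupCreateInfoNV, 2> const & groups )
    {
      // The enhanced-mode constructor fills the count/pointer pairs from the arrays.
      vk::RayTracingPipelineCreateInfoNV createInfo( {}, stages, groups,
                                                     /*maxRecursionDepth*/ 1, layout );

      // Setters return *this, so adjustments can be chained.
      createInfo.setMaxRecursionDepth( 2 )
                .setBasePipelineIndex( -1 );

      return createInfo;
    }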
+ struct SubpassDescription
+ {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SubpassDescription(VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = {}, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, uint32_t inputAttachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference* pInputAttachments_ = {}, uint32_t colorAttachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference* pColorAttachments_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference* pResolveAttachments_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference* pDepthStencilAttachment_ = {}, uint32_t preserveAttachmentCount_ = {}, const uint32_t* pPreserveAttachments_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), pipelineBindPoint( pipelineBindPoint_ ), inputAttachmentCount( inputAttachmentCount_ ), pInputAttachments( pInputAttachments_ ), colorAttachmentCount( colorAttachmentCount_ ), pColorAttachments( pColorAttachments_ ), pResolveAttachments( pResolveAttachments_ ), pDepthStencilAttachment( pDepthStencilAttachment_ ), preserveAttachmentCount( preserveAttachmentCount_ ), pPreserveAttachments( pPreserveAttachments_ )
{}
- ImageFormatProperties( VkImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR SubpassDescription( SubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ SubpassDescription( VkSubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- ImageFormatProperties& operator=( VkImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ SubpassDescription( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const & inputAttachments_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const & colorAttachments_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const & resolveAttachments_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference* pDepthStencilAttachment_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & preserveAttachments_ = {} )
+ : flags( flags_ ), pipelineBindPoint( pipelineBindPoint_ ), inputAttachmentCount( static_cast<uint32_t>( inputAttachments_.size() ) ), pInputAttachments( inputAttachments_.data() ), colorAttachmentCount( static_cast<uint32_t>( colorAttachments_.size() ) ), pColorAttachments( colorAttachments_.data() ), pResolveAttachments( resolveAttachments_.data() ), pDepthStencilAttachment( pDepthStencilAttachment_ ), preserveAttachmentCount( static_cast<uint32_t>( preserveAttachments_.size() ) ), pPreserveAttachments( preserveAttachments_.data() )
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageFormatProperties const *>(&rhs);
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+ VULKAN_HPP_ASSERT( resolveAttachments_.empty() || ( colorAttachments_.size() == resolveAttachments_.size() ) );
+#else
+ if ( !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() ) )
+ {
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::SubpassDescription::SubpassDescription: !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() )" );
+ }
+#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ SubpassDescription & operator=( VkSubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDescription const *>( &rhs );
return *this;
}
- operator VkImageFormatProperties const&() const VULKAN_HPP_NOEXCEPT
+ SubpassDescription & operator=( SubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkImageFormatProperties*>( this );
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( SubpassDescription ) );
+ return *this;
}
- operator VkImageFormatProperties &() VULKAN_HPP_NOEXCEPT
+ SubpassDescription & setFlags( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkImageFormatProperties*>( this );
+ flags = flags_;
+ return *this;
}
- bool operator==( ImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+ SubpassDescription & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
{
- return ( maxExtent == rhs.maxExtent )
- && ( maxMipLevels == rhs.maxMipLevels )
- && ( maxArrayLayers == rhs.maxArrayLayers )
- && ( sampleCounts == rhs.sampleCounts )
- && ( maxResourceSize == rhs.maxResourceSize );
+ pipelineBindPoint = pipelineBindPoint_;
+ return *this;
}
- bool operator!=( ImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+ SubpassDescription & setInputAttachmentCount( uint32_t inputAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ inputAttachmentCount = inputAttachmentCount_;
+ return *this;
+ }
+
+ SubpassDescription & setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference* pInputAttachments_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pInputAttachments = pInputAttachments_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ SubpassDescription & setInputAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const & inputAttachments_ ) VULKAN_HPP_NOEXCEPT
+ {
+ inputAttachmentCount = static_cast<uint32_t>( inputAttachments_.size() );
+ pInputAttachments = inputAttachments_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+ SubpassDescription & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ colorAttachmentCount = colorAttachmentCount_;
+ return *this;
+ }
+
+ SubpassDescription & setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference* pColorAttachments_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pColorAttachments = pColorAttachments_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ SubpassDescription & setColorAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const & colorAttachments_ ) VULKAN_HPP_NOEXCEPT
+ {
+ colorAttachmentCount = static_cast<uint32_t>( colorAttachments_.size() );
+ pColorAttachments = colorAttachments_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+ SubpassDescription & setPResolveAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference* pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pResolveAttachments = pResolveAttachments_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ SubpassDescription & setResolveAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const & resolveAttachments_ ) VULKAN_HPP_NOEXCEPT
+ {
+ colorAttachmentCount = static_cast<uint32_t>( resolveAttachments_.size() );
+ pResolveAttachments = resolveAttachments_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+ SubpassDescription & setPDepthStencilAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference* pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pDepthStencilAttachment = pDepthStencilAttachment_;
+ return *this;
+ }
+
+ SubpassDescription & setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ preserveAttachmentCount = preserveAttachmentCount_;
+ return *this;
+ }
+
+ SubpassDescription & setPPreserveAttachments( const uint32_t* pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pPreserveAttachments = pPreserveAttachments_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ SubpassDescription & setPreserveAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & preserveAttachments_ ) VULKAN_HPP_NOEXCEPT
+ {
+ preserveAttachmentCount = static_cast<uint32_t>( preserveAttachments_.size() );
+ pPreserveAttachments = preserveAttachments_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+ operator VkSubpassDescription const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkSubpassDescription*>( this );
+ }
+
+ operator VkSubpassDescription &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkSubpassDescription*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SubpassDescription const& ) const = default;
+#else
+ bool operator==( SubpassDescription const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( flags == rhs.flags )
+ && ( pipelineBindPoint == rhs.pipelineBindPoint )
+ && ( inputAttachmentCount == rhs.inputAttachmentCount )
+ && ( pInputAttachments == rhs.pInputAttachments )
+ && ( colorAttachmentCount == rhs.colorAttachmentCount )
+ && ( pColorAttachments == rhs.pColorAttachments )
+ && ( pResolveAttachments == rhs.pResolveAttachments )
+ && ( pDepthStencilAttachment == rhs.pDepthStencilAttachment )
+ && ( preserveAttachmentCount == rhs.preserveAttachmentCount )
+ && ( pPreserveAttachments == rhs.pPreserveAttachments );
+ }
+
+ bool operator!=( SubpassDescription const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- VULKAN_HPP_NAMESPACE::Extent3D maxExtent = {};
- uint32_t maxMipLevels = {};
- uint32_t maxArrayLayers = {};
- VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts = {};
- VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize = {};
+ VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags = {};
+ VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
+ uint32_t inputAttachmentCount = {};
+ const VULKAN_HPP_NAMESPACE::AttachmentReference* pInputAttachments = {};
+ uint32_t colorAttachmentCount = {};
+ const VULKAN_HPP_NAMESPACE::AttachmentReference* pColorAttachments = {};
+ const VULKAN_HPP_NAMESPACE::AttachmentReference* pResolveAttachments = {};
+ const VULKAN_HPP_NAMESPACE::AttachmentReference* pDepthStencilAttachment = {};
+ uint32_t preserveAttachmentCount = {};
+ const uint32_t* pPreserveAttachments = {};
+
};
- static_assert( sizeof( ImageFormatProperties ) == sizeof( VkImageFormatProperties ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ImageFormatProperties>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( SubpassDescription ) == sizeof( VkSubpassDescription ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<SubpassDescription>::value, "struct wrapper is not a standard layout!" );
- struct ExternalImageFormatPropertiesNV
+ struct SubpassDependency
{
- ExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = {},
- VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV externalMemoryFeatures_ = {},
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes_ = {},
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
- : imageFormatProperties( imageFormatProperties_ )
- , externalMemoryFeatures( externalMemoryFeatures_ )
- , exportFromImportedHandleTypes( exportFromImportedHandleTypes_ )
- , compatibleHandleTypes( compatibleHandleTypes_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SubpassDependency(uint32_t srcSubpass_ = {}, uint32_t dstSubpass_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {}) VULKAN_HPP_NOEXCEPT
+ : srcSubpass( srcSubpass_ ), dstSubpass( dstSubpass_ ), srcStageMask( srcStageMask_ ), dstStageMask( dstStageMask_ ), srcAccessMask( srcAccessMask_ ), dstAccessMask( dstAccessMask_ ), dependencyFlags( dependencyFlags_ )
{}
- ExternalImageFormatPropertiesNV( VkExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR SubpassDependency( SubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ SubpassDependency( VkSubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ExternalImageFormatPropertiesNV& operator=( VkExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ SubpassDependency & operator=( VkSubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDependency const *>( &rhs );
return *this;
}
- operator VkExternalImageFormatPropertiesNV const&() const VULKAN_HPP_NOEXCEPT
+ SubpassDependency & operator=( SubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkExternalImageFormatPropertiesNV*>( this );
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( SubpassDependency ) );
+ return *this;
}
- operator VkExternalImageFormatPropertiesNV &() VULKAN_HPP_NOEXCEPT
+ SubpassDependency & setSrcSubpass( uint32_t srcSubpass_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkExternalImageFormatPropertiesNV*>( this );
+ srcSubpass = srcSubpass_;
+ return *this;
}
- bool operator==( ExternalImageFormatPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ SubpassDependency & setDstSubpass( uint32_t dstSubpass_ ) VULKAN_HPP_NOEXCEPT
{
- return ( imageFormatProperties == rhs.imageFormatProperties )
- && ( externalMemoryFeatures == rhs.externalMemoryFeatures )
- && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
- && ( compatibleHandleTypes == rhs.compatibleHandleTypes );
+ dstSubpass = dstSubpass_;
+ return *this;
}
- bool operator!=( ExternalImageFormatPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ SubpassDependency & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcStageMask = srcStageMask_;
+ return *this;
+ }
+
+ SubpassDependency & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstStageMask = dstStageMask_;
+ return *this;
+ }
+
+ SubpassDependency & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcAccessMask = srcAccessMask_;
+ return *this;
+ }
+
+ SubpassDependency & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstAccessMask = dstAccessMask_;
+ return *this;
+ }
+
+ SubpassDependency & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dependencyFlags = dependencyFlags_;
+ return *this;
+ }
+
+
+ operator VkSubpassDependency const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkSubpassDependency*>( this );
+ }
+
+ operator VkSubpassDependency &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkSubpassDependency*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SubpassDependency const& ) const = default;
+#else
+ bool operator==( SubpassDependency const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( srcSubpass == rhs.srcSubpass )
+ && ( dstSubpass == rhs.dstSubpass )
+ && ( srcStageMask == rhs.srcStageMask )
+ && ( dstStageMask == rhs.dstStageMask )
+ && ( srcAccessMask == rhs.srcAccessMask )
+ && ( dstAccessMask == rhs.dstAccessMask )
+ && ( dependencyFlags == rhs.dependencyFlags );
+ }
+
+ bool operator!=( SubpassDependency const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties = {};
- VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV externalMemoryFeatures = {};
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes = {};
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes = {};
+ uint32_t srcSubpass = {};
+ uint32_t dstSubpass = {};
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask = {};
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask = {};
+ VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
+ VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
+ VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {};
+
};
- static_assert( sizeof( ExternalImageFormatPropertiesNV ) == sizeof( VkExternalImageFormatPropertiesNV ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ExternalImageFormatPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( SubpassDependency ) == sizeof( VkSubpassDependency ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<SubpassDependency>::value, "struct wrapper is not a standard layout!" );
- struct ExternalMemoryBufferCreateInfo
+ struct RenderPassCreateInfo
{
- VULKAN_HPP_CONSTEXPR ExternalMemoryBufferCreateInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
- : handleTypes( handleTypes_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR RenderPassCreateInfo(VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = {}, uint32_t attachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentDescription* pAttachments_ = {}, uint32_t subpassCount_ = {}, const VULKAN_HPP_NAMESPACE::SubpassDescription* pSubpasses_ = {}, uint32_t dependencyCount_ = {}, const VULKAN_HPP_NAMESPACE::SubpassDependency* pDependencies_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), attachmentCount( attachmentCount_ ), pAttachments( pAttachments_ ), subpassCount( subpassCount_ ), pSubpasses( pSubpasses_ ), dependencyCount( dependencyCount_ ), pDependencies( pDependencies_ )
{}
- VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo & operator=( VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR RenderPassCreateInfo( RenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ RenderPassCreateInfo( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo ) - offsetof( ExternalMemoryBufferCreateInfo, pNext ) );
- return *this;
+ *this = rhs;
}
- ExternalMemoryBufferCreateInfo( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ RenderPassCreateInfo( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentDescription> const & attachments_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDescription> const & subpasses_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDependency> const & dependencies_ = {} )
+ : flags( flags_ ), attachmentCount( static_cast<uint32_t>( attachments_.size() ) ), pAttachments( attachments_.data() ), subpassCount( static_cast<uint32_t>( subpasses_.size() ) ), pSubpasses( subpasses_.data() ), dependencyCount( static_cast<uint32_t>( dependencies_.size() ) ), pDependencies( dependencies_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ RenderPassCreateInfo & operator=( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const *>( &rhs );
+ return *this;
}
- ExternalMemoryBufferCreateInfo& operator=( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ RenderPassCreateInfo & operator=( RenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( RenderPassCreateInfo ) );
return *this;
}
- ExternalMemoryBufferCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ RenderPassCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- ExternalMemoryBufferCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
+ RenderPassCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
- handleTypes = handleTypes_;
+ flags = flags_;
return *this;
}
- operator VkExternalMemoryBufferCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+ RenderPassCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkExternalMemoryBufferCreateInfo*>( this );
+ attachmentCount = attachmentCount_;
+ return *this;
}
- operator VkExternalMemoryBufferCreateInfo &() VULKAN_HPP_NOEXCEPT
+ RenderPassCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription* pAttachments_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkExternalMemoryBufferCreateInfo*>( this );
+ pAttachments = pAttachments_;
+ return *this;
}
- bool operator==( ExternalMemoryBufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ RenderPassCreateInfo & setAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentDescription> const & attachments_ ) VULKAN_HPP_NOEXCEPT
+ {
+ attachmentCount = static_cast<uint32_t>( attachments_.size() );
+ pAttachments = attachments_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+ RenderPassCreateInfo & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ subpassCount = subpassCount_;
+ return *this;
+ }
+
+ RenderPassCreateInfo & setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription* pSubpasses_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pSubpasses = pSubpasses_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ RenderPassCreateInfo & setSubpasses( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDescription> const & subpasses_ ) VULKAN_HPP_NOEXCEPT
+ {
+ subpassCount = static_cast<uint32_t>( subpasses_.size() );
+ pSubpasses = subpasses_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+ RenderPassCreateInfo & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dependencyCount = dependencyCount_;
+ return *this;
+ }
+
+ RenderPassCreateInfo & setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency* pDependencies_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pDependencies = pDependencies_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ RenderPassCreateInfo & setDependencies( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDependency> const & dependencies_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dependencyCount = static_cast<uint32_t>( dependencies_.size() );
+ pDependencies = dependencies_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+ operator VkRenderPassCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkRenderPassCreateInfo*>( this );
+ }
+
+ operator VkRenderPassCreateInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkRenderPassCreateInfo*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( RenderPassCreateInfo const& ) const = default;
+#else
+ bool operator==( RenderPassCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( handleTypes == rhs.handleTypes );
+ && ( flags == rhs.flags )
+ && ( attachmentCount == rhs.attachmentCount )
+ && ( pAttachments == rhs.pAttachments )
+ && ( subpassCount == rhs.subpassCount )
+ && ( pSubpasses == rhs.pSubpasses )
+ && ( dependencyCount == rhs.dependencyCount )
+ && ( pDependencies == rhs.pDependencies );
}
- bool operator!=( ExternalMemoryBufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( RenderPassCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryBufferCreateInfo;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreateInfo;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {};
+ VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags = {};
+ uint32_t attachmentCount = {};
+ const VULKAN_HPP_NAMESPACE::AttachmentDescription* pAttachments = {};
+ uint32_t subpassCount = {};
+ const VULKAN_HPP_NAMESPACE::SubpassDescription* pSubpasses = {};
+ uint32_t dependencyCount = {};
+ const VULKAN_HPP_NAMESPACE::SubpassDependency* pDependencies = {};
+
};
- static_assert( sizeof( ExternalMemoryBufferCreateInfo ) == sizeof( VkExternalMemoryBufferCreateInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ExternalMemoryBufferCreateInfo>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( RenderPassCreateInfo ) == sizeof( VkRenderPassCreateInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<RenderPassCreateInfo>::value, "struct wrapper is not a standard layout!" );
- struct ExternalMemoryImageCreateInfo
+ template <>
+ struct CppType<StructureType, StructureType::eRenderPassCreateInfo>
+ {
+ using Type = RenderPassCreateInfo;
+ };
+
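A minimal sketch of how SubpassDescription, SubpassDependency and RenderPassCreateInfo compose, assuming enhanced mode, default struct constructors, and exceptions are enabled; ArrayProxyNoTemporaries also accepts a single lvalue as a one-element range. The format, layouts, and function name are hypothetical. Note that the enhanced SubpassDescription constructor asserts or throws LogicError when a non-empty resolve-attachment list does not match the color-attachment count.

    #include <vulkan/vulkan.hpp>

    // Sketch: a single-subpass, color-only render pass built from the wrappers above.
    vk::RenderPass makeColorPass( vk::Device device )
    {
      vk::AttachmentDescription colorAttachment( {}, vk::Format::eB8G8R8A8Unorm,
                                                 vk::SampleCountFlagBits::e1,
                                                 vk::AttachmentLoadOp::eClear,
                                                 vk::AttachmentStoreOp::eStore,
                                                 vk::AttachmentLoadOp::eDontCare,
                                                 vk::AttachmentStoreOp::eDontCare,
                                                 vk::ImageLayout::eUndefined,
                                                 vk::ImageLayout::ePresentSrcKHR );

      vk::AttachmentReference colorRef( 0, vk::ImageLayout::eColorAttachmentOptimal );

      // setColorAttachments() fills colorAttachmentCount and pColorAttachments together.
      vk::SubpassDescription subpass;
      subpass.setPipelineBindPoint( vk::PipelineBindPoint::eGraphics )
             .setColorAttachments( colorRef );

      vk::SubpassDependency dependency( VK_SUBPASS_EXTERNAL, 0,
                                        vk::PipelineStageFlagBits::eColorAttachmentOutput,
                                        vk::PipelineStageFlagBits::eColorAttachmentOutput,
                                        {}, vk::AccessFlagBits::eColorAttachmentWrite );

      // The array-proxy constructor derives each count/pointer pair from its argument.
      vk::RenderPassCreateInfo createInfo( {}, colorAttachment, subpass, dependency );

      return device.createRenderPass( createInfo );
    }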
+ struct SubpassDescription2
{
- VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
- : handleTypes( handleTypes_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassDescription2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SubpassDescription2(VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = {}, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, uint32_t viewMask_ = {}, uint32_t inputAttachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference2* pInputAttachments_ = {}, uint32_t colorAttachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference2* pColorAttachments_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference2* pResolveAttachments_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilAttachment_ = {}, uint32_t preserveAttachmentCount_ = {}, const uint32_t* pPreserveAttachments_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), pipelineBindPoint( pipelineBindPoint_ ), viewMask( viewMask_ ), inputAttachmentCount( inputAttachmentCount_ ), pInputAttachments( pInputAttachments_ ), colorAttachmentCount( colorAttachmentCount_ ), pColorAttachments( pColorAttachments_ ), pResolveAttachments( pResolveAttachments_ ), pDepthStencilAttachment( pDepthStencilAttachment_ ), preserveAttachmentCount( preserveAttachmentCount_ ), pPreserveAttachments( pPreserveAttachments_ )
{}
- VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo & operator=( VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR SubpassDescription2( SubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ SubpassDescription2( VkSubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ SubpassDescription2( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, uint32_t viewMask_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const & inputAttachments_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const & colorAttachments_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const & resolveAttachments_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilAttachment_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & preserveAttachments_ = {} )
+ : flags( flags_ ), pipelineBindPoint( pipelineBindPoint_ ), viewMask( viewMask_ ), inputAttachmentCount( static_cast<uint32_t>( inputAttachments_.size() ) ), pInputAttachments( inputAttachments_.data() ), colorAttachmentCount( static_cast<uint32_t>( colorAttachments_.size() ) ), pColorAttachments( colorAttachments_.data() ), pResolveAttachments( resolveAttachments_.data() ), pDepthStencilAttachment( pDepthStencilAttachment_ ), preserveAttachmentCount( static_cast<uint32_t>( preserveAttachments_.size() ) ), pPreserveAttachments( preserveAttachments_.data() )
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo ) - offsetof( ExternalMemoryImageCreateInfo, pNext ) );
- return *this;
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+ VULKAN_HPP_ASSERT( resolveAttachments_.empty() || ( colorAttachments_.size() == resolveAttachments_.size() ) );
+#else
+ if ( !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() ) )
+ {
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::SubpassDescription2::SubpassDescription2: !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() )" );
+ }
+#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ExternalMemoryImageCreateInfo( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ SubpassDescription2 & operator=( VkSubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDescription2 const *>( &rhs );
+ return *this;
}
- ExternalMemoryImageCreateInfo& operator=( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ SubpassDescription2 & operator=( SubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( SubpassDescription2 ) );
return *this;
}
- ExternalMemoryImageCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ SubpassDescription2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- ExternalMemoryImageCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
+ SubpassDescription2 & setFlags( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
- handleTypes = handleTypes_;
+ flags = flags_;
return *this;
}
- operator VkExternalMemoryImageCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+ SubpassDescription2 & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkExternalMemoryImageCreateInfo*>( this );
+ pipelineBindPoint = pipelineBindPoint_;
+ return *this;
}
- operator VkExternalMemoryImageCreateInfo &() VULKAN_HPP_NOEXCEPT
+ SubpassDescription2 & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkExternalMemoryImageCreateInfo*>( this );
+ viewMask = viewMask_;
+ return *this;
}
- bool operator==( ExternalMemoryImageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ SubpassDescription2 & setInputAttachmentCount( uint32_t inputAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ inputAttachmentCount = inputAttachmentCount_;
+ return *this;
+ }
+
+ SubpassDescription2 & setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2* pInputAttachments_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pInputAttachments = pInputAttachments_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ SubpassDescription2 & setInputAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const & inputAttachments_ ) VULKAN_HPP_NOEXCEPT
+ {
+ inputAttachmentCount = static_cast<uint32_t>( inputAttachments_.size() );
+ pInputAttachments = inputAttachments_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+ SubpassDescription2 & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ colorAttachmentCount = colorAttachmentCount_;
+ return *this;
+ }
+
+ SubpassDescription2 & setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2* pColorAttachments_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pColorAttachments = pColorAttachments_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ SubpassDescription2 & setColorAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const & colorAttachments_ ) VULKAN_HPP_NOEXCEPT
+ {
+ colorAttachmentCount = static_cast<uint32_t>( colorAttachments_.size() );
+ pColorAttachments = colorAttachments_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+ SubpassDescription2 & setPResolveAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2* pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pResolveAttachments = pResolveAttachments_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ SubpassDescription2 & setResolveAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const & resolveAttachments_ ) VULKAN_HPP_NOEXCEPT
+ {
+ colorAttachmentCount = static_cast<uint32_t>( resolveAttachments_.size() );
+ pResolveAttachments = resolveAttachments_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+ SubpassDescription2 & setPDepthStencilAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pDepthStencilAttachment = pDepthStencilAttachment_;
+ return *this;
+ }
+
+ SubpassDescription2 & setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ preserveAttachmentCount = preserveAttachmentCount_;
+ return *this;
+ }
+
+ SubpassDescription2 & setPPreserveAttachments( const uint32_t* pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pPreserveAttachments = pPreserveAttachments_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ SubpassDescription2 & setPreserveAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & preserveAttachments_ ) VULKAN_HPP_NOEXCEPT
+ {
+ preserveAttachmentCount = static_cast<uint32_t>( preserveAttachments_.size() );
+ pPreserveAttachments = preserveAttachments_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+ operator VkSubpassDescription2 const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkSubpassDescription2*>( this );
+ }
+
+ operator VkSubpassDescription2 &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkSubpassDescription2*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SubpassDescription2 const& ) const = default;
+#else
+ bool operator==( SubpassDescription2 const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( handleTypes == rhs.handleTypes );
+ && ( flags == rhs.flags )
+ && ( pipelineBindPoint == rhs.pipelineBindPoint )
+ && ( viewMask == rhs.viewMask )
+ && ( inputAttachmentCount == rhs.inputAttachmentCount )
+ && ( pInputAttachments == rhs.pInputAttachments )
+ && ( colorAttachmentCount == rhs.colorAttachmentCount )
+ && ( pColorAttachments == rhs.pColorAttachments )
+ && ( pResolveAttachments == rhs.pResolveAttachments )
+ && ( pDepthStencilAttachment == rhs.pDepthStencilAttachment )
+ && ( preserveAttachmentCount == rhs.preserveAttachmentCount )
+ && ( pPreserveAttachments == rhs.pPreserveAttachments );
}
- bool operator!=( ExternalMemoryImageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( SubpassDescription2 const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryImageCreateInfo;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDescription2;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {};
+ VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags = {};
+ VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
+ uint32_t viewMask = {};
+ uint32_t inputAttachmentCount = {};
+ const VULKAN_HPP_NAMESPACE::AttachmentReference2* pInputAttachments = {};
+ uint32_t colorAttachmentCount = {};
+ const VULKAN_HPP_NAMESPACE::AttachmentReference2* pColorAttachments = {};
+ const VULKAN_HPP_NAMESPACE::AttachmentReference2* pResolveAttachments = {};
+ const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilAttachment = {};
+ uint32_t preserveAttachmentCount = {};
+ const uint32_t* pPreserveAttachments = {};
+
};
- static_assert( sizeof( ExternalMemoryImageCreateInfo ) == sizeof( VkExternalMemoryImageCreateInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ExternalMemoryImageCreateInfo>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( SubpassDescription2 ) == sizeof( VkSubpassDescription2 ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<SubpassDescription2>::value, "struct wrapper is not a standard layout!" );
- struct ExternalMemoryImageCreateInfoNV
+ template <>
+ struct CppType<StructureType, StructureType::eSubpassDescription2>
{
- VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfoNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
- : handleTypes( handleTypes_ )
+ using Type = SubpassDescription2;
+ };
+ using SubpassDescription2KHR = SubpassDescription2;
+
+ struct SubpassDependency2
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassDependency2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SubpassDependency2(uint32_t srcSubpass_ = {}, uint32_t dstSubpass_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {}, int32_t viewOffset_ = {}) VULKAN_HPP_NOEXCEPT
+ : srcSubpass( srcSubpass_ ), dstSubpass( dstSubpass_ ), srcStageMask( srcStageMask_ ), dstStageMask( dstStageMask_ ), srcAccessMask( srcAccessMask_ ), dstAccessMask( dstAccessMask_ ), dependencyFlags( dependencyFlags_ ), viewOffset( viewOffset_ )
{}
- VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR SubpassDependency2( SubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ SubpassDependency2( VkSubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV ) - offsetof( ExternalMemoryImageCreateInfoNV, pNext ) );
- return *this;
+ *this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ExternalMemoryImageCreateInfoNV( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ SubpassDependency2 & operator=( VkSubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDependency2 const *>( &rhs );
+ return *this;
}
- ExternalMemoryImageCreateInfoNV& operator=( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ SubpassDependency2 & operator=( SubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( SubpassDependency2 ) );
return *this;
}
- ExternalMemoryImageCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ SubpassDependency2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- ExternalMemoryImageCreateInfoNV & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT
+ SubpassDependency2 & setSrcSubpass( uint32_t srcSubpass_ ) VULKAN_HPP_NOEXCEPT
{
- handleTypes = handleTypes_;
+ srcSubpass = srcSubpass_;
return *this;
}
- operator VkExternalMemoryImageCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+ SubpassDependency2 & setDstSubpass( uint32_t dstSubpass_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkExternalMemoryImageCreateInfoNV*>( this );
+ dstSubpass = dstSubpass_;
+ return *this;
}
- operator VkExternalMemoryImageCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+ SubpassDependency2 & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkExternalMemoryImageCreateInfoNV*>( this );
+ srcStageMask = srcStageMask_;
+ return *this;
}
- bool operator==( ExternalMemoryImageCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ SubpassDependency2 & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstStageMask = dstStageMask_;
+ return *this;
+ }
+
+ SubpassDependency2 & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcAccessMask = srcAccessMask_;
+ return *this;
+ }
+
+ SubpassDependency2 & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstAccessMask = dstAccessMask_;
+ return *this;
+ }
+
+ SubpassDependency2 & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dependencyFlags = dependencyFlags_;
+ return *this;
+ }
+
+ SubpassDependency2 & setViewOffset( int32_t viewOffset_ ) VULKAN_HPP_NOEXCEPT
+ {
+ viewOffset = viewOffset_;
+ return *this;
+ }
+
+
+ operator VkSubpassDependency2 const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkSubpassDependency2*>( this );
+ }
+
+ operator VkSubpassDependency2 &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkSubpassDependency2*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SubpassDependency2 const& ) const = default;
+#else
+ bool operator==( SubpassDependency2 const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( handleTypes == rhs.handleTypes );
+ && ( srcSubpass == rhs.srcSubpass )
+ && ( dstSubpass == rhs.dstSubpass )
+ && ( srcStageMask == rhs.srcStageMask )
+ && ( dstStageMask == rhs.dstStageMask )
+ && ( srcAccessMask == rhs.srcAccessMask )
+ && ( dstAccessMask == rhs.dstAccessMask )
+ && ( dependencyFlags == rhs.dependencyFlags )
+ && ( viewOffset == rhs.viewOffset );
}
- bool operator!=( ExternalMemoryImageCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( SubpassDependency2 const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryImageCreateInfoNV;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDependency2;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes = {};
+ uint32_t srcSubpass = {};
+ uint32_t dstSubpass = {};
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask = {};
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask = {};
+ VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
+ VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
+ VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {};
+ int32_t viewOffset = {};
+
};
- static_assert( sizeof( ExternalMemoryImageCreateInfoNV ) == sizeof( VkExternalMemoryImageCreateInfoNV ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ExternalMemoryImageCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( SubpassDependency2 ) == sizeof( VkSubpassDependency2 ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<SubpassDependency2>::value, "struct wrapper is not a standard layout!" );
- struct ExternalSemaphoreProperties
+ template <>
+ struct CppType<StructureType, StructureType::eSubpassDependency2>
{
- ExternalSemaphoreProperties( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes_ = {},
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes_ = {},
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures_ = {} ) VULKAN_HPP_NOEXCEPT
- : exportFromImportedHandleTypes( exportFromImportedHandleTypes_ )
- , compatibleHandleTypes( compatibleHandleTypes_ )
- , externalSemaphoreFeatures( externalSemaphoreFeatures_ )
+ using Type = SubpassDependency2;
+ };
+ using SubpassDependency2KHR = SubpassDependency2;
+
+ struct RenderPassCreateInfo2
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreateInfo2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR RenderPassCreateInfo2(VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = {}, uint32_t attachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentDescription2* pAttachments_ = {}, uint32_t subpassCount_ = {}, const VULKAN_HPP_NAMESPACE::SubpassDescription2* pSubpasses_ = {}, uint32_t dependencyCount_ = {}, const VULKAN_HPP_NAMESPACE::SubpassDependency2* pDependencies_ = {}, uint32_t correlatedViewMaskCount_ = {}, const uint32_t* pCorrelatedViewMasks_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), attachmentCount( attachmentCount_ ), pAttachments( pAttachments_ ), subpassCount( subpassCount_ ), pSubpasses( pSubpasses_ ), dependencyCount( dependencyCount_ ), pDependencies( pDependencies_ ), correlatedViewMaskCount( correlatedViewMaskCount_ ), pCorrelatedViewMasks( pCorrelatedViewMasks_ )
{}
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties & operator=( VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR RenderPassCreateInfo2( RenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ RenderPassCreateInfo2( VkRenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties ) - offsetof( ExternalSemaphoreProperties, pNext ) );
+ *this = rhs;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ RenderPassCreateInfo2( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentDescription2> const & attachments_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDescription2> const & subpasses_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDependency2> const & dependencies_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & correlatedViewMasks_ = {} )
+ : flags( flags_ ), attachmentCount( static_cast<uint32_t>( attachments_.size() ) ), pAttachments( attachments_.data() ), subpassCount( static_cast<uint32_t>( subpasses_.size() ) ), pSubpasses( subpasses_.data() ), dependencyCount( static_cast<uint32_t>( dependencies_.size() ) ), pDependencies( dependencies_.data() ), correlatedViewMaskCount( static_cast<uint32_t>( correlatedViewMasks_.size() ) ), pCorrelatedViewMasks( correlatedViewMasks_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ RenderPassCreateInfo2 & operator=( VkRenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const *>( &rhs );
return *this;
}
- ExternalSemaphoreProperties( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ RenderPassCreateInfo2 & operator=( RenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( RenderPassCreateInfo2 ) );
+ return *this;
}
- ExternalSemaphoreProperties& operator=( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ RenderPassCreateInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties const *>(&rhs);
+ pNext = pNext_;
return *this;
}
- operator VkExternalSemaphoreProperties const&() const VULKAN_HPP_NOEXCEPT
+ RenderPassCreateInfo2 & setFlags( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkExternalSemaphoreProperties*>( this );
+ flags = flags_;
+ return *this;
}
- operator VkExternalSemaphoreProperties &() VULKAN_HPP_NOEXCEPT
+ RenderPassCreateInfo2 & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkExternalSemaphoreProperties*>( this );
+ attachmentCount = attachmentCount_;
+ return *this;
}
- bool operator==( ExternalSemaphoreProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+ RenderPassCreateInfo2 & setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription2* pAttachments_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pAttachments = pAttachments_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ RenderPassCreateInfo2 & setAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentDescription2> const & attachments_ ) VULKAN_HPP_NOEXCEPT
+ {
+ attachmentCount = static_cast<uint32_t>( attachments_.size() );
+ pAttachments = attachments_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+ RenderPassCreateInfo2 & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ subpassCount = subpassCount_;
+ return *this;
+ }
+
+ RenderPassCreateInfo2 & setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription2* pSubpasses_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pSubpasses = pSubpasses_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ RenderPassCreateInfo2 & setSubpasses( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDescription2> const & subpasses_ ) VULKAN_HPP_NOEXCEPT
+ {
+ subpassCount = static_cast<uint32_t>( subpasses_.size() );
+ pSubpasses = subpasses_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+ RenderPassCreateInfo2 & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dependencyCount = dependencyCount_;
+ return *this;
+ }
+
+ RenderPassCreateInfo2 & setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency2* pDependencies_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pDependencies = pDependencies_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ RenderPassCreateInfo2 & setDependencies( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDependency2> const & dependencies_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dependencyCount = static_cast<uint32_t>( dependencies_.size() );
+ pDependencies = dependencies_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+ RenderPassCreateInfo2 & setCorrelatedViewMaskCount( uint32_t correlatedViewMaskCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ correlatedViewMaskCount = correlatedViewMaskCount_;
+ return *this;
+ }
+
+ RenderPassCreateInfo2 & setPCorrelatedViewMasks( const uint32_t* pCorrelatedViewMasks_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pCorrelatedViewMasks = pCorrelatedViewMasks_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ RenderPassCreateInfo2 & setCorrelatedViewMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & correlatedViewMasks_ ) VULKAN_HPP_NOEXCEPT
+ {
+ correlatedViewMaskCount = static_cast<uint32_t>( correlatedViewMasks_.size() );
+ pCorrelatedViewMasks = correlatedViewMasks_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+ operator VkRenderPassCreateInfo2 const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkRenderPassCreateInfo2*>( this );
+ }
+
+ operator VkRenderPassCreateInfo2 &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkRenderPassCreateInfo2*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( RenderPassCreateInfo2 const& ) const = default;
+#else
+ bool operator==( RenderPassCreateInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
- && ( compatibleHandleTypes == rhs.compatibleHandleTypes )
- && ( externalSemaphoreFeatures == rhs.externalSemaphoreFeatures );
+ && ( flags == rhs.flags )
+ && ( attachmentCount == rhs.attachmentCount )
+ && ( pAttachments == rhs.pAttachments )
+ && ( subpassCount == rhs.subpassCount )
+ && ( pSubpasses == rhs.pSubpasses )
+ && ( dependencyCount == rhs.dependencyCount )
+ && ( pDependencies == rhs.pDependencies )
+ && ( correlatedViewMaskCount == rhs.correlatedViewMaskCount )
+ && ( pCorrelatedViewMasks == rhs.pCorrelatedViewMasks );
}
- bool operator!=( ExternalSemaphoreProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( RenderPassCreateInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalSemaphoreProperties;
- void* pNext = {};
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes = {};
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes = {};
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures = {};
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreateInfo2;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags = {};
+ uint32_t attachmentCount = {};
+ const VULKAN_HPP_NAMESPACE::AttachmentDescription2* pAttachments = {};
+ uint32_t subpassCount = {};
+ const VULKAN_HPP_NAMESPACE::SubpassDescription2* pSubpasses = {};
+ uint32_t dependencyCount = {};
+ const VULKAN_HPP_NAMESPACE::SubpassDependency2* pDependencies = {};
+ uint32_t correlatedViewMaskCount = {};
+ const uint32_t* pCorrelatedViewMasks = {};
+
};
- static_assert( sizeof( ExternalSemaphoreProperties ) == sizeof( VkExternalSemaphoreProperties ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ExternalSemaphoreProperties>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( RenderPassCreateInfo2 ) == sizeof( VkRenderPassCreateInfo2 ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<RenderPassCreateInfo2>::value, "struct wrapper is not a standard layout!" );
- struct FenceCreateInfo
+ template <>
+ struct CppType<StructureType, StructureType::eRenderPassCreateInfo2>
{
- VULKAN_HPP_CONSTEXPR FenceCreateInfo( VULKAN_HPP_NAMESPACE::FenceCreateFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ using Type = RenderPassCreateInfo2;
+ };
+ using RenderPassCreateInfo2KHR = RenderPassCreateInfo2;
+
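A minimal usage sketch for the RenderPassCreateInfo2 wrapper added above, showing the chained setters and the new ArrayProxy-based setAttachments/setSubpasses overloads; the vk::Device handle and the color format are assumptions, not part of this diff:

#include <vulkan/vulkan.hpp>

// Sketch only: assumes a valid vk::Device and a swapchain color format chosen elsewhere.
vk::RenderPass makeSimplePass( vk::Device device, vk::Format colorFormat )
{
  vk::AttachmentDescription2 color;
  color.setFormat( colorFormat )
       .setSamples( vk::SampleCountFlagBits::e1 )
       .setLoadOp( vk::AttachmentLoadOp::eClear )
       .setStoreOp( vk::AttachmentStoreOp::eStore )
       .setInitialLayout( vk::ImageLayout::eUndefined )
       .setFinalLayout( vk::ImageLayout::ePresentSrcKHR );

  vk::AttachmentReference2 colorRef;
  colorRef.setAttachment( 0 )
          .setLayout( vk::ImageLayout::eColorAttachmentOptimal );

  vk::SubpassDescription2 subpass;
  subpass.setPipelineBindPoint( vk::PipelineBindPoint::eGraphics )
         .setColorAttachmentCount( 1 )
         .setPColorAttachments( &colorRef );

  vk::RenderPassCreateInfo2 info;
  info.setAttachments( color )   // ArrayProxy overload fills attachmentCount and pAttachments
      .setSubpasses( subpass );  // likewise for subpassCount and pSubpasses

  return device.createRenderPass2( info );
}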
+ struct SamplerCreateInfo
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SamplerCreateInfo(VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Filter magFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, VULKAN_HPP_NAMESPACE::Filter minFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode_ = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest, VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat, VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat, VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat, float mipLodBias_ = {}, VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable_ = {}, float maxAnisotropy_ = {}, VULKAN_HPP_NAMESPACE::Bool32 compareEnable_ = {}, VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever, float minLod_ = {}, float maxLod_ = {}, VULKAN_HPP_NAMESPACE::BorderColor borderColor_ = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack, VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), magFilter( magFilter_ ), minFilter( minFilter_ ), mipmapMode( mipmapMode_ ), addressModeU( addressModeU_ ), addressModeV( addressModeV_ ), addressModeW( addressModeW_ ), mipLodBias( mipLodBias_ ), anisotropyEnable( anisotropyEnable_ ), maxAnisotropy( maxAnisotropy_ ), compareEnable( compareEnable_ ), compareOp( compareOp_ ), minLod( minLod_ ), maxLod( maxLod_ ), borderColor( borderColor_ ), unnormalizedCoordinates( unnormalizedCoordinates_ )
{}
- VULKAN_HPP_NAMESPACE::FenceCreateInfo & operator=( VULKAN_HPP_NAMESPACE::FenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR SamplerCreateInfo( SamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ SamplerCreateInfo( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::FenceCreateInfo ) - offsetof( FenceCreateInfo, pNext ) );
- return *this;
+ *this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- FenceCreateInfo( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ SamplerCreateInfo & operator=( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerCreateInfo const *>( &rhs );
+ return *this;
}
- FenceCreateInfo& operator=( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ SamplerCreateInfo & operator=( SamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FenceCreateInfo const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( SamplerCreateInfo ) );
return *this;
}
- FenceCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ SamplerCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- FenceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::FenceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ SamplerCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- operator VkFenceCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+ SamplerCreateInfo & setMagFilter( VULKAN_HPP_NAMESPACE::Filter magFilter_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkFenceCreateInfo*>( this );
+ magFilter = magFilter_;
+ return *this;
}
- operator VkFenceCreateInfo &() VULKAN_HPP_NOEXCEPT
+ SamplerCreateInfo & setMinFilter( VULKAN_HPP_NAMESPACE::Filter minFilter_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkFenceCreateInfo*>( this );
+ minFilter = minFilter_;
+ return *this;
}
- bool operator==( FenceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ SamplerCreateInfo & setMipmapMode( VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode_ ) VULKAN_HPP_NOEXCEPT
+ {
+ mipmapMode = mipmapMode_;
+ return *this;
+ }
+
+ SamplerCreateInfo & setAddressModeU( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU_ ) VULKAN_HPP_NOEXCEPT
+ {
+ addressModeU = addressModeU_;
+ return *this;
+ }
+
+ SamplerCreateInfo & setAddressModeV( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV_ ) VULKAN_HPP_NOEXCEPT
+ {
+ addressModeV = addressModeV_;
+ return *this;
+ }
+
+ SamplerCreateInfo & setAddressModeW( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW_ ) VULKAN_HPP_NOEXCEPT
+ {
+ addressModeW = addressModeW_;
+ return *this;
+ }
+
+ SamplerCreateInfo & setMipLodBias( float mipLodBias_ ) VULKAN_HPP_NOEXCEPT
+ {
+ mipLodBias = mipLodBias_;
+ return *this;
+ }
+
+ SamplerCreateInfo & setAnisotropyEnable( VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable_ ) VULKAN_HPP_NOEXCEPT
+ {
+ anisotropyEnable = anisotropyEnable_;
+ return *this;
+ }
+
+ SamplerCreateInfo & setMaxAnisotropy( float maxAnisotropy_ ) VULKAN_HPP_NOEXCEPT
+ {
+ maxAnisotropy = maxAnisotropy_;
+ return *this;
+ }
+
+ SamplerCreateInfo & setCompareEnable( VULKAN_HPP_NAMESPACE::Bool32 compareEnable_ ) VULKAN_HPP_NOEXCEPT
+ {
+ compareEnable = compareEnable_;
+ return *this;
+ }
+
+ SamplerCreateInfo & setCompareOp( VULKAN_HPP_NAMESPACE::CompareOp compareOp_ ) VULKAN_HPP_NOEXCEPT
+ {
+ compareOp = compareOp_;
+ return *this;
+ }
+
+ SamplerCreateInfo & setMinLod( float minLod_ ) VULKAN_HPP_NOEXCEPT
+ {
+ minLod = minLod_;
+ return *this;
+ }
+
+ SamplerCreateInfo & setMaxLod( float maxLod_ ) VULKAN_HPP_NOEXCEPT
+ {
+ maxLod = maxLod_;
+ return *this;
+ }
+
+ SamplerCreateInfo & setBorderColor( VULKAN_HPP_NAMESPACE::BorderColor borderColor_ ) VULKAN_HPP_NOEXCEPT
+ {
+ borderColor = borderColor_;
+ return *this;
+ }
+
+ SamplerCreateInfo & setUnnormalizedCoordinates( VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ ) VULKAN_HPP_NOEXCEPT
+ {
+ unnormalizedCoordinates = unnormalizedCoordinates_;
+ return *this;
+ }
+
+
+ operator VkSamplerCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkSamplerCreateInfo*>( this );
+ }
+
+ operator VkSamplerCreateInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkSamplerCreateInfo*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SamplerCreateInfo const& ) const = default;
+#else
+ bool operator==( SamplerCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( flags == rhs.flags );
+ && ( flags == rhs.flags )
+ && ( magFilter == rhs.magFilter )
+ && ( minFilter == rhs.minFilter )
+ && ( mipmapMode == rhs.mipmapMode )
+ && ( addressModeU == rhs.addressModeU )
+ && ( addressModeV == rhs.addressModeV )
+ && ( addressModeW == rhs.addressModeW )
+ && ( mipLodBias == rhs.mipLodBias )
+ && ( anisotropyEnable == rhs.anisotropyEnable )
+ && ( maxAnisotropy == rhs.maxAnisotropy )
+ && ( compareEnable == rhs.compareEnable )
+ && ( compareOp == rhs.compareOp )
+ && ( minLod == rhs.minLod )
+ && ( maxLod == rhs.maxLod )
+ && ( borderColor == rhs.borderColor )
+ && ( unnormalizedCoordinates == rhs.unnormalizedCoordinates );
}
- bool operator!=( FenceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( SamplerCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceCreateInfo;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCreateInfo;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::FenceCreateFlags flags = {};
+ VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags = {};
+ VULKAN_HPP_NAMESPACE::Filter magFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest;
+ VULKAN_HPP_NAMESPACE::Filter minFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest;
+ VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest;
+ VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat;
+ VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat;
+ VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat;
+ float mipLodBias = {};
+ VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable = {};
+ float maxAnisotropy = {};
+ VULKAN_HPP_NAMESPACE::Bool32 compareEnable = {};
+ VULKAN_HPP_NAMESPACE::CompareOp compareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever;
+ float minLod = {};
+ float maxLod = {};
+ VULKAN_HPP_NAMESPACE::BorderColor borderColor = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack;
+ VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates = {};
+
};
- static_assert( sizeof( FenceCreateInfo ) == sizeof( VkFenceCreateInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<FenceCreateInfo>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( SamplerCreateInfo ) == sizeof( VkSamplerCreateInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<SamplerCreateInfo>::value, "struct wrapper is not a standard layout!" );
- struct FenceGetFdInfoKHR
+ template <>
+ struct CppType<StructureType, StructureType::eSamplerCreateInfo>
{
- VULKAN_HPP_CONSTEXPR FenceGetFdInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = {},
- VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
- : fence( fence_ )
- , handleType( handleType_ )
+ using Type = SamplerCreateInfo;
+ };
+
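A short sketch of the SamplerCreateInfo wrapper above in use; the vk::Device handle is an assumption and the filter/LOD choices are illustrative only:

#include <vulkan/vulkan.hpp>

// Sketch only: trilinear sampler with repeat addressing; assumes a valid vk::Device.
vk::Sampler makeLinearSampler( vk::Device device )
{
  vk::SamplerCreateInfo info;
  info.setMagFilter( vk::Filter::eLinear )
      .setMinFilter( vk::Filter::eLinear )
      .setMipmapMode( vk::SamplerMipmapMode::eLinear )
      .setAddressModeU( vk::SamplerAddressMode::eRepeat )
      .setAddressModeV( vk::SamplerAddressMode::eRepeat )
      .setAddressModeW( vk::SamplerAddressMode::eRepeat )
      .setMaxLod( VK_LOD_CLAMP_NONE );  // no LOD clamp; all other members keep their defaults
  return device.createSampler( info );
}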
+ struct SamplerYcbcrConversionCreateInfo
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionCreateInfo(VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity, VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull, VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {}, VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, VULKAN_HPP_NAMESPACE::Filter chromaFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ = {}) VULKAN_HPP_NOEXCEPT
+ : format( format_ ), ycbcrModel( ycbcrModel_ ), ycbcrRange( ycbcrRange_ ), components( components_ ), xChromaOffset( xChromaOffset_ ), yChromaOffset( yChromaOffset_ ), chromaFilter( chromaFilter_ ), forceExplicitReconstruction( forceExplicitReconstruction_ )
{}
- VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR & operator=( VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionCreateInfo( SamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ SamplerYcbcrConversionCreateInfo( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR ) - offsetof( FenceGetFdInfoKHR, pNext ) );
- return *this;
+ *this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- FenceGetFdInfoKHR( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ SamplerYcbcrConversionCreateInfo & operator=( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const *>( &rhs );
+ return *this;
}
- FenceGetFdInfoKHR& operator=( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ SamplerYcbcrConversionCreateInfo & operator=( SamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( SamplerYcbcrConversionCreateInfo ) );
return *this;
}
- FenceGetFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ SamplerYcbcrConversionCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- FenceGetFdInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
+ SamplerYcbcrConversionCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
{
- fence = fence_;
+ format = format_;
return *this;
}
- FenceGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ SamplerYcbcrConversionCreateInfo & setYcbcrModel( VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ ) VULKAN_HPP_NOEXCEPT
{
- handleType = handleType_;
+ ycbcrModel = ycbcrModel_;
return *this;
}
- operator VkFenceGetFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+ SamplerYcbcrConversionCreateInfo & setYcbcrRange( VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkFenceGetFdInfoKHR*>( this );
+ ycbcrRange = ycbcrRange_;
+ return *this;
}
- operator VkFenceGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT
+ SamplerYcbcrConversionCreateInfo & setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkFenceGetFdInfoKHR*>( this );
+ components = components_;
+ return *this;
}
- bool operator==( FenceGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ SamplerYcbcrConversionCreateInfo & setXChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ ) VULKAN_HPP_NOEXCEPT
+ {
+ xChromaOffset = xChromaOffset_;
+ return *this;
+ }
+
+ SamplerYcbcrConversionCreateInfo & setYChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ ) VULKAN_HPP_NOEXCEPT
+ {
+ yChromaOffset = yChromaOffset_;
+ return *this;
+ }
+
+ SamplerYcbcrConversionCreateInfo & setChromaFilter( VULKAN_HPP_NAMESPACE::Filter chromaFilter_ ) VULKAN_HPP_NOEXCEPT
+ {
+ chromaFilter = chromaFilter_;
+ return *this;
+ }
+
+ SamplerYcbcrConversionCreateInfo & setForceExplicitReconstruction( VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ ) VULKAN_HPP_NOEXCEPT
+ {
+ forceExplicitReconstruction = forceExplicitReconstruction_;
+ return *this;
+ }
+
+
+ operator VkSamplerYcbcrConversionCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo*>( this );
+ }
+
+ operator VkSamplerYcbcrConversionCreateInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkSamplerYcbcrConversionCreateInfo*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SamplerYcbcrConversionCreateInfo const& ) const = default;
+#else
+ bool operator==( SamplerYcbcrConversionCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( fence == rhs.fence )
- && ( handleType == rhs.handleType );
+ && ( format == rhs.format )
+ && ( ycbcrModel == rhs.ycbcrModel )
+ && ( ycbcrRange == rhs.ycbcrRange )
+ && ( components == rhs.components )
+ && ( xChromaOffset == rhs.xChromaOffset )
+ && ( yChromaOffset == rhs.yChromaOffset )
+ && ( chromaFilter == rhs.chromaFilter )
+ && ( forceExplicitReconstruction == rhs.forceExplicitReconstruction );
}
- bool operator!=( FenceGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( SamplerYcbcrConversionCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetFdInfoKHR;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionCreateInfo;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::Fence fence = {};
- VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
+ VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity;
+ VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull;
+ VULKAN_HPP_NAMESPACE::ComponentMapping components = {};
+ VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
+ VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
+ VULKAN_HPP_NAMESPACE::Filter chromaFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest;
+ VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction = {};
+
};
- static_assert( sizeof( FenceGetFdInfoKHR ) == sizeof( VkFenceGetFdInfoKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<FenceGetFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( SamplerYcbcrConversionCreateInfo ) == sizeof( VkSamplerYcbcrConversionCreateInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<SamplerYcbcrConversionCreateInfo>::value, "struct wrapper is not a standard layout!" );
-#ifdef VK_USE_PLATFORM_WIN32_KHR
+ template <>
+ struct CppType<StructureType, StructureType::eSamplerYcbcrConversionCreateInfo>
+ {
+ using Type = SamplerYcbcrConversionCreateInfo;
+ };
+ using SamplerYcbcrConversionCreateInfoKHR = SamplerYcbcrConversionCreateInfo;
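A sketch of the SamplerYcbcrConversionCreateInfo wrapper above; the multi-planar format and BT.709 settings are illustrative, and the vk::Device (with the samplerYcbcrConversion feature enabled) is an assumption:

#include <vulkan/vulkan.hpp>

// Sketch only: conversion object for an NV12-style two-plane format.
vk::SamplerYcbcrConversion makeNv12Conversion( vk::Device device )
{
  vk::SamplerYcbcrConversionCreateInfo info;
  info.setFormat( vk::Format::eG8B8R82Plane420Unorm )
      .setYcbcrModel( vk::SamplerYcbcrModelConversion::eYcbcr709 )
      .setYcbcrRange( vk::SamplerYcbcrRange::eItuNarrow )
      .setChromaFilter( vk::Filter::eLinear );
  return device.createSamplerYcbcrConversion( info );
}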
- struct FenceGetWin32HandleInfoKHR
+ class SamplerYcbcrConversion
{
- VULKAN_HPP_CONSTEXPR FenceGetWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = {},
- VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
- : fence( fence_ )
- , handleType( handleType_ )
+ public:
+ using CType = VkSamplerYcbcrConversion;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSamplerYcbcrConversion;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSamplerYcbcrConversion;
+
+ public:
+ VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion() VULKAN_HPP_NOEXCEPT
+ : m_samplerYcbcrConversion(VK_NULL_HANDLE)
{}
- VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR & operator=( VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_samplerYcbcrConversion(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT SamplerYcbcrConversion( VkSamplerYcbcrConversion samplerYcbcrConversion ) VULKAN_HPP_NOEXCEPT
+ : m_samplerYcbcrConversion( samplerYcbcrConversion )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ SamplerYcbcrConversion & operator=(VkSamplerYcbcrConversion samplerYcbcrConversion) VULKAN_HPP_NOEXCEPT
+ {
+ m_samplerYcbcrConversion = samplerYcbcrConversion;
+ return *this;
+ }
+#endif
+
+ SamplerYcbcrConversion & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR ) - offsetof( FenceGetWin32HandleInfoKHR, pNext ) );
+ m_samplerYcbcrConversion = VK_NULL_HANDLE;
return *this;
}
- FenceGetWin32HandleInfoKHR( VkFenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SamplerYcbcrConversion const& ) const = default;
+#else
+ bool operator==( SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_samplerYcbcrConversion == rhs.m_samplerYcbcrConversion;
+ }
+
+ bool operator!=(SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_samplerYcbcrConversion != rhs.m_samplerYcbcrConversion;
+ }
+
+ bool operator<(SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_samplerYcbcrConversion < rhs.m_samplerYcbcrConversion;
+ }
+#endif
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSamplerYcbcrConversion() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_samplerYcbcrConversion;
+ }
+
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_samplerYcbcrConversion != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_samplerYcbcrConversion == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkSamplerYcbcrConversion m_samplerYcbcrConversion;
+ };
+ static_assert( sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ) == sizeof( VkSamplerYcbcrConversion ), "handle and wrapper have different size!" );
+
+ template <>
+ struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eSamplerYcbcrConversion>
+ {
+ using type = VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion;
+ };
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eSamplerYcbcrConversion>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion;
+ };
+
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSamplerYcbcrConversion>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion;
+ };
+
+
+ template <>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+ };
+ using SamplerYcbcrConversionKHR = SamplerYcbcrConversion;
+
+ struct SemaphoreCreateInfo
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SemaphoreCreateInfo(VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR SemaphoreCreateInfo( SemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ SemaphoreCreateInfo( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- FenceGetWin32HandleInfoKHR& operator=( VkFenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ SemaphoreCreateInfo & operator=( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const *>( &rhs );
return *this;
}
- FenceGetWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ SemaphoreCreateInfo & operator=( SemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( SemaphoreCreateInfo ) );
return *this;
}
- FenceGetWin32HandleInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
+ SemaphoreCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- fence = fence_;
+ pNext = pNext_;
return *this;
}
- FenceGetWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ SemaphoreCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
- handleType = handleType_;
+ flags = flags_;
return *this;
}
- operator VkFenceGetWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkSemaphoreCreateInfo const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkFenceGetWin32HandleInfoKHR*>( this );
+ return *reinterpret_cast<const VkSemaphoreCreateInfo*>( this );
}
- operator VkFenceGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkFenceGetWin32HandleInfoKHR*>( this );
+ return *reinterpret_cast<VkSemaphoreCreateInfo*>( this );
}
- bool operator==( FenceGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SemaphoreCreateInfo const& ) const = default;
+#else
+ bool operator==( SemaphoreCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( fence == rhs.fence )
- && ( handleType == rhs.handleType );
+ && ( flags == rhs.flags );
}
- bool operator!=( FenceGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( SemaphoreCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetWin32HandleInfoKHR;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreCreateInfo;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::Fence fence = {};
- VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
+ VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags = {};
+
};
- static_assert( sizeof( FenceGetWin32HandleInfoKHR ) == sizeof( VkFenceGetWin32HandleInfoKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<FenceGetWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ static_assert( sizeof( SemaphoreCreateInfo ) == sizeof( VkSemaphoreCreateInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<SemaphoreCreateInfo>::value, "struct wrapper is not a standard layout!" );
- struct FilterCubicImageViewImageFormatPropertiesEXT
+ template <>
+ struct CppType<StructureType, StructureType::eSemaphoreCreateInfo>
{
- FilterCubicImageViewImageFormatPropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 filterCubic_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax_ = {} ) VULKAN_HPP_NOEXCEPT
- : filterCubic( filterCubic_ )
- , filterCubicMinmax( filterCubicMinmax_ )
+ using Type = SemaphoreCreateInfo;
+ };
+
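A sketch showing how the setPNext setter above is typically used to chain an extension structure; vk::SemaphoreTypeCreateInfo is not part of this hunk, and the vk::Device (Vulkan 1.2 with timelineSemaphore enabled) is an assumption:

#include <vulkan/vulkan.hpp>
#include <cstdint>

// Sketch only: timeline semaphore created by chaining SemaphoreTypeCreateInfo through pNext.
vk::Semaphore makeTimelineSemaphore( vk::Device device, uint64_t initialValue )
{
  vk::SemaphoreTypeCreateInfo typeInfo;
  typeInfo.setSemaphoreType( vk::SemaphoreType::eTimeline )
          .setInitialValue( initialValue );

  vk::SemaphoreCreateInfo info;
  info.setPNext( &typeInfo );  // sType stays eSemaphoreCreateInfo; the type info rides on the chain
  return device.createSemaphore( info );
}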
+ struct ShaderModuleCreateInfo
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eShaderModuleCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ShaderModuleCreateInfo(VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_ = {}, size_t codeSize_ = {}, const uint32_t* pCode_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), codeSize( codeSize_ ), pCode( pCode_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR ShaderModuleCreateInfo( ShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ShaderModuleCreateInfo( VkShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ ShaderModuleCreateInfo( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & code_ )
+ : flags( flags_ ), codeSize( code_.size() * 4 ), pCode( code_.data() )
{}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ ShaderModuleCreateInfo & operator=( VkShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT ) - offsetof( FilterCubicImageViewImageFormatPropertiesEXT, pNext ) );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const *>( &rhs );
return *this;
}
- FilterCubicImageViewImageFormatPropertiesEXT( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ ShaderModuleCreateInfo & operator=( ShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ShaderModuleCreateInfo ) );
+ return *this;
}
- FilterCubicImageViewImageFormatPropertiesEXT& operator=( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ ShaderModuleCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT const *>(&rhs);
+ pNext = pNext_;
return *this;
}
- operator VkFilterCubicImageViewImageFormatPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+ ShaderModuleCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkFilterCubicImageViewImageFormatPropertiesEXT*>( this );
+ flags = flags_;
+ return *this;
}
- operator VkFilterCubicImageViewImageFormatPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+ ShaderModuleCreateInfo & setCodeSize( size_t codeSize_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkFilterCubicImageViewImageFormatPropertiesEXT*>( this );
+ codeSize = codeSize_;
+ return *this;
}
- bool operator==( FilterCubicImageViewImageFormatPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ ShaderModuleCreateInfo & setPCode( const uint32_t* pCode_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pCode = pCode_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ ShaderModuleCreateInfo & setCode( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & code_ ) VULKAN_HPP_NOEXCEPT
+ {
+ codeSize = code_.size() * 4;
+ pCode = code_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+ operator VkShaderModuleCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkShaderModuleCreateInfo*>( this );
+ }
+
+ operator VkShaderModuleCreateInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkShaderModuleCreateInfo*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ShaderModuleCreateInfo const& ) const = default;
+#else
+ bool operator==( ShaderModuleCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( filterCubic == rhs.filterCubic )
- && ( filterCubicMinmax == rhs.filterCubicMinmax );
+ && ( flags == rhs.flags )
+ && ( codeSize == rhs.codeSize )
+ && ( pCode == rhs.pCode );
}
- bool operator!=( FilterCubicImageViewImageFormatPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( ShaderModuleCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFilterCubicImageViewImageFormatPropertiesEXT;
- void* pNext = {};
- VULKAN_HPP_NAMESPACE::Bool32 filterCubic = {};
- VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax = {};
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags = {};
+ size_t codeSize = {};
+ const uint32_t* pCode = {};
+
};
- static_assert( sizeof( FilterCubicImageViewImageFormatPropertiesEXT ) == sizeof( VkFilterCubicImageViewImageFormatPropertiesEXT ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<FilterCubicImageViewImageFormatPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( ShaderModuleCreateInfo ) == sizeof( VkShaderModuleCreateInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ShaderModuleCreateInfo>::value, "struct wrapper is not a standard layout!" );
- struct FormatProperties
+ template <>
+ struct CppType<StructureType, StructureType::eShaderModuleCreateInfo>
{
- FormatProperties( VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures_ = {},
- VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures_ = {},
- VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures_ = {} ) VULKAN_HPP_NOEXCEPT
- : linearTilingFeatures( linearTilingFeatures_ )
- , optimalTilingFeatures( optimalTilingFeatures_ )
- , bufferFeatures( bufferFeatures_ )
+ using Type = ShaderModuleCreateInfo;
+ };
+
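A sketch of the new ArrayProxy constructor on ShaderModuleCreateInfo shown above (it derives codeSize in bytes from the uint32_t word count); the vk::Device handle and the SPIR-V vector are assumptions:

#include <vulkan/vulkan.hpp>
#include <cstdint>
#include <vector>

// Sketch only: assumes the SPIR-V blob was loaded into a std::vector<uint32_t> elsewhere.
vk::ShaderModule makeShaderModule( vk::Device device, std::vector<uint32_t> const & spirv )
{
  vk::ShaderModuleCreateInfo info( {}, spirv );  // fills codeSize = spirv.size() * 4 and pCode
  return device.createShaderModule( info );
}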
+ class SurfaceKHR
+ {
+ public:
+ using CType = VkSurfaceKHR;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSurfaceKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSurfaceKHR;
+
+ public:
+ VULKAN_HPP_CONSTEXPR SurfaceKHR() VULKAN_HPP_NOEXCEPT
+ : m_surfaceKHR(VK_NULL_HANDLE)
{}
- FormatProperties( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR SurfaceKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_surfaceKHR(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT SurfaceKHR( VkSurfaceKHR surfaceKHR ) VULKAN_HPP_NOEXCEPT
+ : m_surfaceKHR( surfaceKHR )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ SurfaceKHR & operator=(VkSurfaceKHR surfaceKHR) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ m_surfaceKHR = surfaceKHR;
+ return *this;
}
+#endif
- FormatProperties& operator=( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ SurfaceKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FormatProperties const *>(&rhs);
+ m_surfaceKHR = VK_NULL_HANDLE;
return *this;
}
- operator VkFormatProperties const&() const VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SurfaceKHR const& ) const = default;
+#else
+ bool operator==( SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkFormatProperties*>( this );
+ return m_surfaceKHR == rhs.m_surfaceKHR;
}
- operator VkFormatProperties &() VULKAN_HPP_NOEXCEPT
+ bool operator!=(SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkFormatProperties*>( this );
+ return m_surfaceKHR != rhs.m_surfaceKHR;
}
- bool operator==( FormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator<(SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- return ( linearTilingFeatures == rhs.linearTilingFeatures )
- && ( optimalTilingFeatures == rhs.optimalTilingFeatures )
- && ( bufferFeatures == rhs.bufferFeatures );
+ return m_surfaceKHR < rhs.m_surfaceKHR;
}
+#endif
- bool operator!=( FormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSurfaceKHR() const VULKAN_HPP_NOEXCEPT
{
- return !operator==( rhs );
+ return m_surfaceKHR;
}
- public:
- VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures = {};
- VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures = {};
- VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures = {};
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_surfaceKHR != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_surfaceKHR == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkSurfaceKHR m_surfaceKHR;
};
- static_assert( sizeof( FormatProperties ) == sizeof( VkFormatProperties ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<FormatProperties>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( VULKAN_HPP_NAMESPACE::SurfaceKHR ) == sizeof( VkSurfaceKHR ), "handle and wrapper have different size!" );
- struct FormatProperties2
+ template <>
+ struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eSurfaceKHR>
+ {
+ using type = VULKAN_HPP_NAMESPACE::SurfaceKHR;
+ };
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eSurfaceKHR>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::SurfaceKHR;
+ };
+
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSurfaceKHR>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::SurfaceKHR;
+ };
+
+
+ template <>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::SurfaceKHR>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+ };
+
+ struct SwapchainCreateInfoKHR
{
- FormatProperties2( VULKAN_HPP_NAMESPACE::FormatProperties formatProperties_ = {} ) VULKAN_HPP_NOEXCEPT
- : formatProperties( formatProperties_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainCreateInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SwapchainCreateInfoKHR(VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {}, uint32_t minImageCount_ = {}, VULKAN_HPP_NAMESPACE::Format imageFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear, VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = {}, uint32_t imageArrayLayers_ = {}, VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ = {}, VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = {}, const uint32_t* pQueueFamilyIndices_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque, VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate, VULKAN_HPP_NAMESPACE::Bool32 clipped_ = {}, VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), surface( surface_ ), minImageCount( minImageCount_ ), imageFormat( imageFormat_ ), imageColorSpace( imageColorSpace_ ), imageExtent( imageExtent_ ), imageArrayLayers( imageArrayLayers_ ), imageUsage( imageUsage_ ), imageSharingMode( imageSharingMode_ ), queueFamilyIndexCount( queueFamilyIndexCount_ ), pQueueFamilyIndices( pQueueFamilyIndices_ ), preTransform( preTransform_ ), compositeAlpha( compositeAlpha_ ), presentMode( presentMode_ ), clipped( clipped_ ), oldSwapchain( oldSwapchain_ )
{}
- VULKAN_HPP_NAMESPACE::FormatProperties2 & operator=( VULKAN_HPP_NAMESPACE::FormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR SwapchainCreateInfoKHR( SwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ SwapchainCreateInfoKHR( VkSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::FormatProperties2 ) - offsetof( FormatProperties2, pNext ) );
+ *this = rhs;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ SwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_, VULKAN_HPP_NAMESPACE::SurfaceKHR surface_, uint32_t minImageCount_, VULKAN_HPP_NAMESPACE::Format imageFormat_, VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_, VULKAN_HPP_NAMESPACE::Extent2D imageExtent_, uint32_t imageArrayLayers_, VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_, VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque, VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate, VULKAN_HPP_NAMESPACE::Bool32 clipped_ = {}, VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ = {} )
+ : flags( flags_ ), surface( surface_ ), minImageCount( minImageCount_ ), imageFormat( imageFormat_ ), imageColorSpace( imageColorSpace_ ), imageExtent( imageExtent_ ), imageArrayLayers( imageArrayLayers_ ), imageUsage( imageUsage_ ), imageSharingMode( imageSharingMode_ ), queueFamilyIndexCount( static_cast<uint32_t>( queueFamilyIndices_.size() ) ), pQueueFamilyIndices( queueFamilyIndices_.data() ), preTransform( preTransform_ ), compositeAlpha( compositeAlpha_ ), presentMode( presentMode_ ), clipped( clipped_ ), oldSwapchain( oldSwapchain_ )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ SwapchainCreateInfoKHR & operator=( VkSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const *>( &rhs );
return *this;
}
- FormatProperties2( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ SwapchainCreateInfoKHR & operator=( SwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( SwapchainCreateInfoKHR ) );
+ return *this;
}
- FormatProperties2& operator=( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ SwapchainCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FormatProperties2 const *>(&rhs);
+ pNext = pNext_;
return *this;
}
- operator VkFormatProperties2 const&() const VULKAN_HPP_NOEXCEPT
+ SwapchainCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkFormatProperties2*>( this );
+ flags = flags_;
+ return *this;
}
- operator VkFormatProperties2 &() VULKAN_HPP_NOEXCEPT
+ SwapchainCreateInfoKHR & setSurface( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkFormatProperties2*>( this );
+ surface = surface_;
+ return *this;
}
- bool operator==( FormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+ SwapchainCreateInfoKHR & setMinImageCount( uint32_t minImageCount_ ) VULKAN_HPP_NOEXCEPT
{
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( formatProperties == rhs.formatProperties );
+ minImageCount = minImageCount_;
+ return *this;
}
- bool operator!=( FormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+ SwapchainCreateInfoKHR & setImageFormat( VULKAN_HPP_NAMESPACE::Format imageFormat_ ) VULKAN_HPP_NOEXCEPT
{
- return !operator==( rhs );
+ imageFormat = imageFormat_;
+ return *this;
}
- public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFormatProperties2;
- void* pNext = {};
- VULKAN_HPP_NAMESPACE::FormatProperties formatProperties = {};
- };
- static_assert( sizeof( FormatProperties2 ) == sizeof( VkFormatProperties2 ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<FormatProperties2>::value, "struct wrapper is not a standard layout!" );
+ SwapchainCreateInfoKHR & setImageColorSpace( VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ ) VULKAN_HPP_NOEXCEPT
+ {
+ imageColorSpace = imageColorSpace_;
+ return *this;
+ }
- struct FramebufferAttachmentImageInfo
- {
- VULKAN_HPP_CONSTEXPR FramebufferAttachmentImageInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {},
- VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {},
- uint32_t width_ = {},
- uint32_t height_ = {},
- uint32_t layerCount_ = {},
- uint32_t viewFormatCount_ = {},
- const VULKAN_HPP_NAMESPACE::Format* pViewFormats_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , usage( usage_ )
- , width( width_ )
- , height( height_ )
- , layerCount( layerCount_ )
- , viewFormatCount( viewFormatCount_ )
- , pViewFormats( pViewFormats_ )
- {}
+ SwapchainCreateInfoKHR & setImageExtent( VULKAN_HPP_NAMESPACE::Extent2D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
+ {
+ imageExtent = imageExtent_;
+ return *this;
+ }
- VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo & operator=( VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ SwapchainCreateInfoKHR & setImageArrayLayers( uint32_t imageArrayLayers_ ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo ) - offsetof( FramebufferAttachmentImageInfo, pNext ) );
+ imageArrayLayers = imageArrayLayers_;
return *this;
}
- FramebufferAttachmentImageInfo( VkFramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ SwapchainCreateInfoKHR & setImageUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ imageUsage = imageUsage_;
+ return *this;
}
- FramebufferAttachmentImageInfo& operator=( VkFramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ SwapchainCreateInfoKHR & setImageSharingMode( VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo const *>(&rhs);
+ imageSharingMode = imageSharingMode_;
return *this;
}
- FramebufferAttachmentImageInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ SwapchainCreateInfoKHR & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ queueFamilyIndexCount = queueFamilyIndexCount_;
return *this;
}
- FramebufferAttachmentImageInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ SwapchainCreateInfoKHR & setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
{
- flags = flags_;
+ pQueueFamilyIndices = pQueueFamilyIndices_;
return *this;
}
- FramebufferAttachmentImageInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ SwapchainCreateInfoKHR & setQueueFamilyIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
{
- usage = usage_;
+ queueFamilyIndexCount = static_cast<uint32_t>( queueFamilyIndices_.size() );
+ pQueueFamilyIndices = queueFamilyIndices_.data();
return *this;
}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
- FramebufferAttachmentImageInfo & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
+ SwapchainCreateInfoKHR & setPreTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ ) VULKAN_HPP_NOEXCEPT
{
- width = width_;
+ preTransform = preTransform_;
return *this;
}
- FramebufferAttachmentImageInfo & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
+ SwapchainCreateInfoKHR & setCompositeAlpha( VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ ) VULKAN_HPP_NOEXCEPT
{
- height = height_;
+ compositeAlpha = compositeAlpha_;
return *this;
}
- FramebufferAttachmentImageInfo & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
+ SwapchainCreateInfoKHR & setPresentMode( VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ ) VULKAN_HPP_NOEXCEPT
{
- layerCount = layerCount_;
+ presentMode = presentMode_;
return *this;
}
- FramebufferAttachmentImageInfo & setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT
+ SwapchainCreateInfoKHR & setClipped( VULKAN_HPP_NAMESPACE::Bool32 clipped_ ) VULKAN_HPP_NOEXCEPT
{
- viewFormatCount = viewFormatCount_;
+ clipped = clipped_;
return *this;
}
- FramebufferAttachmentImageInfo & setPViewFormats( const VULKAN_HPP_NAMESPACE::Format* pViewFormats_ ) VULKAN_HPP_NOEXCEPT
+ SwapchainCreateInfoKHR & setOldSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ ) VULKAN_HPP_NOEXCEPT
{
- pViewFormats = pViewFormats_;
+ oldSwapchain = oldSwapchain_;
return *this;
}
- operator VkFramebufferAttachmentImageInfo const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkSwapchainCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkFramebufferAttachmentImageInfo*>( this );
+ return *reinterpret_cast<const VkSwapchainCreateInfoKHR*>( this );
}
- operator VkFramebufferAttachmentImageInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkFramebufferAttachmentImageInfo*>( this );
+ return *reinterpret_cast<VkSwapchainCreateInfoKHR*>( this );
}
- bool operator==( FramebufferAttachmentImageInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SwapchainCreateInfoKHR const& ) const = default;
+#else
+ bool operator==( SwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
- && ( usage == rhs.usage )
- && ( width == rhs.width )
- && ( height == rhs.height )
- && ( layerCount == rhs.layerCount )
- && ( viewFormatCount == rhs.viewFormatCount )
- && ( pViewFormats == rhs.pViewFormats );
+ && ( surface == rhs.surface )
+ && ( minImageCount == rhs.minImageCount )
+ && ( imageFormat == rhs.imageFormat )
+ && ( imageColorSpace == rhs.imageColorSpace )
+ && ( imageExtent == rhs.imageExtent )
+ && ( imageArrayLayers == rhs.imageArrayLayers )
+ && ( imageUsage == rhs.imageUsage )
+ && ( imageSharingMode == rhs.imageSharingMode )
+ && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
+ && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices )
+ && ( preTransform == rhs.preTransform )
+ && ( compositeAlpha == rhs.compositeAlpha )
+ && ( presentMode == rhs.presentMode )
+ && ( clipped == rhs.clipped )
+ && ( oldSwapchain == rhs.oldSwapchain );
}
- bool operator!=( FramebufferAttachmentImageInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( SwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferAttachmentImageInfo;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainCreateInfoKHR;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {};
- VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
- uint32_t width = {};
- uint32_t height = {};
- uint32_t layerCount = {};
- uint32_t viewFormatCount = {};
- const VULKAN_HPP_NAMESPACE::Format* pViewFormats = {};
+ VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags = {};
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface = {};
+ uint32_t minImageCount = {};
+ VULKAN_HPP_NAMESPACE::Format imageFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear;
+ VULKAN_HPP_NAMESPACE::Extent2D imageExtent = {};
+ uint32_t imageArrayLayers = {};
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage = {};
+ VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
+ uint32_t queueFamilyIndexCount = {};
+ const uint32_t* pQueueFamilyIndices = {};
+ VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
+ VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque;
+ VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate;
+ VULKAN_HPP_NAMESPACE::Bool32 clipped = {};
+ VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain = {};
+
};
- static_assert( sizeof( FramebufferAttachmentImageInfo ) == sizeof( VkFramebufferAttachmentImageInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<FramebufferAttachmentImageInfo>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( SwapchainCreateInfoKHR ) == sizeof( VkSwapchainCreateInfoKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<SwapchainCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
- struct FramebufferAttachmentsCreateInfo
+ template <>
+ struct CppType<StructureType, StructureType::eSwapchainCreateInfoKHR>
{
- VULKAN_HPP_CONSTEXPR FramebufferAttachmentsCreateInfo( uint32_t attachmentImageInfoCount_ = {},
- const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo* pAttachmentImageInfos_ = {} ) VULKAN_HPP_NOEXCEPT
- : attachmentImageInfoCount( attachmentImageInfoCount_ )
- , pAttachmentImageInfos( pAttachmentImageInfos_ )
+ using Type = SwapchainCreateInfoKHR;
+ };
+
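A sketch of filling the SwapchainCreateInfoKHR wrapper above from queried surface capabilities; the physical device, device, surface and the hard-coded B8G8R8A8 sRGB format are assumptions (real code should pick a format returned by getSurfaceFormatsKHR):

#include <vulkan/vulkan.hpp>

// Sketch only: FIFO present mode, exclusive sharing, one array layer.
vk::SwapchainKHR makeSwapchain( vk::PhysicalDevice gpu, vk::Device device,
                                vk::SurfaceKHR surface, vk::Extent2D extent,
                                vk::SwapchainKHR oldSwapchain = {} )
{
  vk::SurfaceCapabilitiesKHR caps = gpu.getSurfaceCapabilitiesKHR( surface );

  vk::SwapchainCreateInfoKHR info;
  info.setSurface( surface )
      .setMinImageCount( caps.minImageCount + 1 )
      .setImageFormat( vk::Format::eB8G8R8A8Srgb )
      .setImageColorSpace( vk::ColorSpaceKHR::eSrgbNonlinear )
      .setImageExtent( extent )
      .setImageArrayLayers( 1 )
      .setImageUsage( vk::ImageUsageFlagBits::eColorAttachment )
      .setImageSharingMode( vk::SharingMode::eExclusive )
      .setPreTransform( caps.currentTransform )
      .setCompositeAlpha( vk::CompositeAlphaFlagBitsKHR::eOpaque )
      .setPresentMode( vk::PresentModeKHR::eFifo )
      .setClipped( VK_TRUE )
      .setOldSwapchain( oldSwapchain );

  return device.createSwapchainKHR( info );
}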
+ struct ValidationCacheCreateInfoEXT
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationCacheCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ValidationCacheCreateInfoEXT(VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_ = {}, size_t initialDataSize_ = {}, const void* pInitialData_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), initialDataSize( initialDataSize_ ), pInitialData( pInitialData_ )
{}
- VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo & operator=( VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ValidationCacheCreateInfoEXT( ValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ValidationCacheCreateInfoEXT( VkValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo ) - offsetof( FramebufferAttachmentsCreateInfo, pNext ) );
- return *this;
+ *this = rhs;
}
- FramebufferAttachmentsCreateInfo( VkFramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ template <typename T>
+ ValidationCacheCreateInfoEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & initialData_ )
+ : flags( flags_ ), initialDataSize( initialData_.size() * sizeof(T) ), pInitialData( initialData_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ ValidationCacheCreateInfoEXT & operator=( VkValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const *>( &rhs );
+ return *this;
}
- FramebufferAttachmentsCreateInfo& operator=( VkFramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ ValidationCacheCreateInfoEXT & operator=( ValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ValidationCacheCreateInfoEXT ) );
return *this;
}
- FramebufferAttachmentsCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ ValidationCacheCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- FramebufferAttachmentsCreateInfo & setAttachmentImageInfoCount( uint32_t attachmentImageInfoCount_ ) VULKAN_HPP_NOEXCEPT
+ ValidationCacheCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
- attachmentImageInfoCount = attachmentImageInfoCount_;
+ flags = flags_;
return *this;
}
- FramebufferAttachmentsCreateInfo & setPAttachmentImageInfos( const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo* pAttachmentImageInfos_ ) VULKAN_HPP_NOEXCEPT
+ ValidationCacheCreateInfoEXT & setInitialDataSize( size_t initialDataSize_ ) VULKAN_HPP_NOEXCEPT
{
- pAttachmentImageInfos = pAttachmentImageInfos_;
+ initialDataSize = initialDataSize_;
return *this;
}
- operator VkFramebufferAttachmentsCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+ ValidationCacheCreateInfoEXT & setPInitialData( const void* pInitialData_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkFramebufferAttachmentsCreateInfo*>( this );
+ pInitialData = pInitialData_;
+ return *this;
}
- operator VkFramebufferAttachmentsCreateInfo &() VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ template <typename T>
+ ValidationCacheCreateInfoEXT & setInitialData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & initialData_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkFramebufferAttachmentsCreateInfo*>( this );
+ initialDataSize = initialData_.size() * sizeof(T);
+ pInitialData = initialData_.data();
+ return *this;
}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
- bool operator==( FramebufferAttachmentsCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+ operator VkValidationCacheCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkValidationCacheCreateInfoEXT*>( this );
+ }
+
+ operator VkValidationCacheCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkValidationCacheCreateInfoEXT*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ValidationCacheCreateInfoEXT const& ) const = default;
+#else
+ bool operator==( ValidationCacheCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( attachmentImageInfoCount == rhs.attachmentImageInfoCount )
- && ( pAttachmentImageInfos == rhs.pAttachmentImageInfos );
+ && ( flags == rhs.flags )
+ && ( initialDataSize == rhs.initialDataSize )
+ && ( pInitialData == rhs.pInitialData );
}
- bool operator!=( FramebufferAttachmentsCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( ValidationCacheCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferAttachmentsCreateInfo;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationCacheCreateInfoEXT;
const void* pNext = {};
- uint32_t attachmentImageInfoCount = {};
- const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo* pAttachmentImageInfos = {};
+ VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags = {};
+ size_t initialDataSize = {};
+ const void* pInitialData = {};
+
};
- static_assert( sizeof( FramebufferAttachmentsCreateInfo ) == sizeof( VkFramebufferAttachmentsCreateInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<FramebufferAttachmentsCreateInfo>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( ValidationCacheCreateInfoEXT ) == sizeof( VkValidationCacheCreateInfoEXT ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ValidationCacheCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
- struct FramebufferCreateInfo
+ template <>
+ struct CppType<StructureType, StructureType::eValidationCacheCreateInfoEXT>
+ {
+ using Type = ValidationCacheCreateInfoEXT;
+ };
+
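A short sketch of using ValidationCacheCreateInfoEXT, assuming VK_EXT_validation_cache is enabled on the device; the `blob` input is assumed to be data saved from a cache on an earlier run (e.g. via getValidationCacheDataEXT). Not part of the header.

    #include <vulkan/vulkan.hpp>
    #include <vector>

    // Hypothetical helper: re-create a validation cache seeded with previously saved data.
    vk::ValidationCacheEXT restoreCache( vk::Device device, std::vector<uint8_t> const & blob )
    {
      vk::ValidationCacheCreateInfoEXT info;
      info.setInitialDataSize( blob.size() ).setPInitialData( blob.data() );
      return device.createValidationCacheEXT( info );
    }
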
+ class ValidationCacheEXT
{
- VULKAN_HPP_CONSTEXPR FramebufferCreateInfo( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_ = {},
- VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {},
- uint32_t attachmentCount_ = {},
- const VULKAN_HPP_NAMESPACE::ImageView* pAttachments_ = {},
- uint32_t width_ = {},
- uint32_t height_ = {},
- uint32_t layers_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , renderPass( renderPass_ )
- , attachmentCount( attachmentCount_ )
- , pAttachments( pAttachments_ )
- , width( width_ )
- , height( height_ )
- , layers( layers_ )
+ public:
+ using CType = VkValidationCacheEXT;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eValidationCacheEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eValidationCacheEXT;
+
+ public:
+ VULKAN_HPP_CONSTEXPR ValidationCacheEXT() VULKAN_HPP_NOEXCEPT
+ : m_validationCacheEXT(VK_NULL_HANDLE)
{}
- VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & operator=( VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ValidationCacheEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_validationCacheEXT(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT ValidationCacheEXT( VkValidationCacheEXT validationCacheEXT ) VULKAN_HPP_NOEXCEPT
+ : m_validationCacheEXT( validationCacheEXT )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ ValidationCacheEXT & operator=(VkValidationCacheEXT validationCacheEXT) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::FramebufferCreateInfo ) - offsetof( FramebufferCreateInfo, pNext ) );
+ m_validationCacheEXT = validationCacheEXT;
return *this;
}
+#endif
- FramebufferCreateInfo( VkFramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ ValidationCacheEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ {
+ m_validationCacheEXT = VK_NULL_HANDLE;
+ return *this;
+ }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ValidationCacheEXT const& ) const = default;
+#else
+ bool operator==( ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_validationCacheEXT == rhs.m_validationCacheEXT;
+ }
+
+ bool operator!=(ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_validationCacheEXT != rhs.m_validationCacheEXT;
+ }
+
+ bool operator<(ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_validationCacheEXT < rhs.m_validationCacheEXT;
+ }
+#endif
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkValidationCacheEXT() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_validationCacheEXT;
+ }
+
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_validationCacheEXT != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_validationCacheEXT == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkValidationCacheEXT m_validationCacheEXT;
+ };
+ static_assert( sizeof( VULKAN_HPP_NAMESPACE::ValidationCacheEXT ) == sizeof( VkValidationCacheEXT ), "handle and wrapper have different size!" );
+
+ template <>
+ struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eValidationCacheEXT>
+ {
+ using type = VULKAN_HPP_NAMESPACE::ValidationCacheEXT;
+ };
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eValidationCacheEXT>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::ValidationCacheEXT;
+ };
+
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eValidationCacheEXT>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::ValidationCacheEXT;
+ };
+
+
+ template <>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::ValidationCacheEXT>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+ };
+
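Like the other handle wrappers, ValidationCacheEXT is a value type around the raw handle; a tiny illustration (an assumption-free fragment, not part of the header) of the null-state semantics defined above:

    #include <cassert>
    #include <vulkan/vulkan.hpp>

    void nullStateDemo()
    {
      vk::ValidationCacheEXT cache;  // default-constructed wrapper holds VK_NULL_HANDLE
      assert( !cache && cache == vk::ValidationCacheEXT( nullptr ) );
      // explicit operator bool() only becomes true once a real handle is assigned
    }
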
+ struct DisplayPowerInfoEXT
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPowerInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT(VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ = VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff) VULKAN_HPP_NOEXCEPT
+ : powerState( powerState_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT( DisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DisplayPowerInfoEXT( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- FramebufferCreateInfo& operator=( VkFramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ DisplayPowerInfoEXT & operator=( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT const *>( &rhs );
return *this;
}
- FramebufferCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ DisplayPowerInfoEXT & operator=( DisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DisplayPowerInfoEXT ) );
+ return *this;
+ }
+
+ DisplayPowerInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- FramebufferCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ DisplayPowerInfoEXT & setPowerState( VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ ) VULKAN_HPP_NOEXCEPT
{
- flags = flags_;
+ powerState = powerState_;
return *this;
}
- FramebufferCreateInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
+
+ operator VkDisplayPowerInfoEXT const&() const VULKAN_HPP_NOEXCEPT
{
- renderPass = renderPass_;
+ return *reinterpret_cast<const VkDisplayPowerInfoEXT*>( this );
+ }
+
+ operator VkDisplayPowerInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkDisplayPowerInfoEXT*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DisplayPowerInfoEXT const& ) const = default;
+#else
+ bool operator==( DisplayPowerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( powerState == rhs.powerState );
+ }
+
+ bool operator!=( DisplayPowerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPowerInfoEXT;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState = VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff;
+
+ };
+ static_assert( sizeof( DisplayPowerInfoEXT ) == sizeof( VkDisplayPowerInfoEXT ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<DisplayPowerInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eDisplayPowerInfoEXT>
+ {
+ using Type = DisplayPowerInfoEXT;
+ };
+
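A sketch of DisplayPowerInfoEXT in use, assuming VK_EXT_display_control is enabled and `display` was obtained through the VK_KHR_display queries; not part of the header.

    #include <vulkan/vulkan.hpp>

    // Hypothetical helper: blank a display by sending the eOff power state shown above.
    void blankDisplay( vk::Device device, vk::DisplayKHR display )
    {
      vk::DisplayPowerInfoEXT powerInfo( vk::DisplayPowerStateEXT::eOff );
      device.displayPowerControlEXT( display, powerInfo );
    }
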
+ struct MappedMemoryRange
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMappedMemoryRange;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR MappedMemoryRange(VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}) VULKAN_HPP_NOEXCEPT
+ : memory( memory_ ), offset( offset_ ), size( size_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR MappedMemoryRange( MappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ MappedMemoryRange( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ MappedMemoryRange & operator=( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MappedMemoryRange const *>( &rhs );
return *this;
}
- FramebufferCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
+ MappedMemoryRange & operator=( MappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT
{
- attachmentCount = attachmentCount_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( MappedMemoryRange ) );
return *this;
}
- FramebufferCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::ImageView* pAttachments_ ) VULKAN_HPP_NOEXCEPT
+ MappedMemoryRange & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- pAttachments = pAttachments_;
+ pNext = pNext_;
return *this;
}
- FramebufferCreateInfo & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
+ MappedMemoryRange & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
{
- width = width_;
+ memory = memory_;
return *this;
}
- FramebufferCreateInfo & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
+ MappedMemoryRange & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
{
- height = height_;
+ offset = offset_;
return *this;
}
- FramebufferCreateInfo & setLayers( uint32_t layers_ ) VULKAN_HPP_NOEXCEPT
+ MappedMemoryRange & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
{
- layers = layers_;
+ size = size_;
return *this;
}
- operator VkFramebufferCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkMappedMemoryRange const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkFramebufferCreateInfo*>( this );
+ return *reinterpret_cast<const VkMappedMemoryRange*>( this );
}
- operator VkFramebufferCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkMappedMemoryRange &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkFramebufferCreateInfo*>( this );
+ return *reinterpret_cast<VkMappedMemoryRange*>( this );
}
- bool operator==( FramebufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( MappedMemoryRange const& ) const = default;
+#else
+ bool operator==( MappedMemoryRange const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( flags == rhs.flags )
- && ( renderPass == rhs.renderPass )
- && ( attachmentCount == rhs.attachmentCount )
- && ( pAttachments == rhs.pAttachments )
- && ( width == rhs.width )
- && ( height == rhs.height )
- && ( layers == rhs.layers );
+ && ( memory == rhs.memory )
+ && ( offset == rhs.offset )
+ && ( size == rhs.size );
}
- bool operator!=( FramebufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( MappedMemoryRange const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferCreateInfo;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMappedMemoryRange;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags = {};
- VULKAN_HPP_NAMESPACE::RenderPass renderPass = {};
- uint32_t attachmentCount = {};
- const VULKAN_HPP_NAMESPACE::ImageView* pAttachments = {};
- uint32_t width = {};
- uint32_t height = {};
- uint32_t layers = {};
+ VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+
};
- static_assert( sizeof( FramebufferCreateInfo ) == sizeof( VkFramebufferCreateInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<FramebufferCreateInfo>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( MappedMemoryRange ) == sizeof( VkMappedMemoryRange ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<MappedMemoryRange>::value, "struct wrapper is not a standard layout!" );
- struct FramebufferMixedSamplesCombinationNV
+ template <>
+ struct CppType<StructureType, StructureType::eMappedMemoryRange>
+ {
+ using Type = MappedMemoryRange;
+ };
+
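A sketch of MappedMemoryRange in use (not part of the header): flushing host writes to a non-coherent allocation. Using VK_WHOLE_SIZE sidesteps the nonCoherentAtomSize rounding rule for `size`.

    #include <vulkan/vulkan.hpp>

    // Make all host writes to this allocation visible to the device.
    void flushAll( vk::Device device, vk::DeviceMemory memory )
    {
      vk::MappedMemoryRange range( memory, 0, VK_WHOLE_SIZE );
      device.flushMappedMemoryRanges( range );  // single range converts to an ArrayProxy
    }
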
+ struct MemoryRequirements
{
- FramebufferMixedSamplesCombinationNV( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge,
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
- VULKAN_HPP_NAMESPACE::SampleCountFlags depthStencilSamples_ = {},
- VULKAN_HPP_NAMESPACE::SampleCountFlags colorSamples_ = {} ) VULKAN_HPP_NOEXCEPT
- : coverageReductionMode( coverageReductionMode_ )
- , rasterizationSamples( rasterizationSamples_ )
- , depthStencilSamples( depthStencilSamples_ )
- , colorSamples( colorSamples_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR MemoryRequirements(VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize alignment_ = {}, uint32_t memoryTypeBits_ = {}) VULKAN_HPP_NOEXCEPT
+ : size( size_ ), alignment( alignment_ ), memoryTypeBits( memoryTypeBits_ )
{}
- VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV & operator=( VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR MemoryRequirements( MemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ MemoryRequirements( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV ) - offsetof( FramebufferMixedSamplesCombinationNV, pNext ) );
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ MemoryRequirements & operator=( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryRequirements const *>( &rhs );
return *this;
}
- FramebufferMixedSamplesCombinationNV( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ MemoryRequirements & operator=( MemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( MemoryRequirements ) );
+ return *this;
+ }
+
+
+ operator VkMemoryRequirements const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkMemoryRequirements*>( this );
+ }
+
+ operator VkMemoryRequirements &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkMemoryRequirements*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( MemoryRequirements const& ) const = default;
+#else
+ bool operator==( MemoryRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( size == rhs.size )
+ && ( alignment == rhs.alignment )
+ && ( memoryTypeBits == rhs.memoryTypeBits );
+ }
+
+ bool operator!=( MemoryRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize alignment = {};
+ uint32_t memoryTypeBits = {};
+
+ };
+ static_assert( sizeof( MemoryRequirements ) == sizeof( VkMemoryRequirements ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<MemoryRequirements>::value, "struct wrapper is not a standard layout!" );
+
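A sketch of the classic sub-allocation arithmetic built on the three MemoryRequirements fields above; the arena `cursor` is an assumed running offset, and this is not part of the header.

    #include <vulkan/vulkan.hpp>

    // Hypothetical sub-allocator step: align the arena cursor to what the buffer reports and
    // return the new cursor. memoryTypeBits must separately be checked against the arena's
    // memory type. The bit trick is valid because the reported alignment is a power of two.
    vk::DeviceSize placeBuffer( vk::Device device, vk::Buffer buffer, vk::DeviceSize cursor )
    {
      vk::MemoryRequirements req = device.getBufferMemoryRequirements( buffer );
      vk::DeviceSize offset = ( cursor + req.alignment - 1 ) & ~( req.alignment - 1 );
      return offset + req.size;
    }
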
+ struct MemoryRequirements2
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryRequirements2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR MemoryRequirements2(VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements_ = {}) VULKAN_HPP_NOEXCEPT
+ : memoryRequirements( memoryRequirements_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR MemoryRequirements2( MemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ MemoryRequirements2( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- FramebufferMixedSamplesCombinationNV& operator=( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ MemoryRequirements2 & operator=( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryRequirements2 const *>( &rhs );
return *this;
}
- operator VkFramebufferMixedSamplesCombinationNV const&() const VULKAN_HPP_NOEXCEPT
+ MemoryRequirements2 & operator=( MemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkFramebufferMixedSamplesCombinationNV*>( this );
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( MemoryRequirements2 ) );
+ return *this;
}
- operator VkFramebufferMixedSamplesCombinationNV &() VULKAN_HPP_NOEXCEPT
+
+ operator VkMemoryRequirements2 const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkFramebufferMixedSamplesCombinationNV*>( this );
+ return *reinterpret_cast<const VkMemoryRequirements2*>( this );
}
- bool operator==( FramebufferMixedSamplesCombinationNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ operator VkMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkMemoryRequirements2*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( MemoryRequirements2 const& ) const = default;
+#else
+ bool operator==( MemoryRequirements2 const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( coverageReductionMode == rhs.coverageReductionMode )
- && ( rasterizationSamples == rhs.rasterizationSamples )
- && ( depthStencilSamples == rhs.depthStencilSamples )
- && ( colorSamples == rhs.colorSamples );
+ && ( memoryRequirements == rhs.memoryRequirements );
}
- bool operator!=( FramebufferMixedSamplesCombinationNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( MemoryRequirements2 const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferMixedSamplesCombinationNV;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryRequirements2;
void* pNext = {};
- VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge;
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
- VULKAN_HPP_NAMESPACE::SampleCountFlags depthStencilSamples = {};
- VULKAN_HPP_NAMESPACE::SampleCountFlags colorSamples = {};
+ VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements = {};
+
};
- static_assert( sizeof( FramebufferMixedSamplesCombinationNV ) == sizeof( VkFramebufferMixedSamplesCombinationNV ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<FramebufferMixedSamplesCombinationNV>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( MemoryRequirements2 ) == sizeof( VkMemoryRequirements2 ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<MemoryRequirements2>::value, "struct wrapper is not a standard layout!" );
- struct VertexInputBindingDescription
+ template <>
+ struct CppType<StructureType, StructureType::eMemoryRequirements2>
{
- VULKAN_HPP_CONSTEXPR VertexInputBindingDescription( uint32_t binding_ = {},
- uint32_t stride_ = {},
- VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex ) VULKAN_HPP_NOEXCEPT
- : binding( binding_ )
- , stride( stride_ )
- , inputRate( inputRate_ )
+ using Type = MemoryRequirements2;
+ };
+ using MemoryRequirements2KHR = MemoryRequirements2;
+
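A sketch of the pNext-able MemoryRequirements2 query, assuming Vulkan 1.1+ (or VK_KHR_get_memory_requirements2 plus VK_KHR_dedicated_allocation); here the dedicated-allocation struct is chained in via a StructureChain. Not part of the header.

    #include <vulkan/vulkan.hpp>

    // Ask whether the driver would prefer a dedicated allocation for this buffer.
    bool prefersDedicated( vk::Device device, vk::Buffer buffer )
    {
      auto chain = device.getBufferMemoryRequirements2< vk::MemoryRequirements2,
                                                        vk::MemoryDedicatedRequirements >(
          vk::BufferMemoryRequirementsInfo2( buffer ) );
      return chain.get<vk::MemoryDedicatedRequirements>().prefersDedicatedAllocation;
    }
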
+ struct DeviceGroupPresentCapabilitiesKHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupPresentCapabilitiesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentCapabilitiesKHR(std::array<uint32_t,VK_MAX_DEVICE_GROUP_SIZE> const& presentMask_ = {}, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {}) VULKAN_HPP_NOEXCEPT
+ : presentMask( presentMask_ ), modes( modes_ )
{}
- VertexInputBindingDescription( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentCapabilitiesKHR( DeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DeviceGroupPresentCapabilitiesKHR( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VertexInputBindingDescription& operator=( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+ DeviceGroupPresentCapabilitiesKHR & operator=( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputBindingDescription const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR const *>( &rhs );
return *this;
}
- VertexInputBindingDescription & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
+ DeviceGroupPresentCapabilitiesKHR & operator=( DeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- binding = binding_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DeviceGroupPresentCapabilitiesKHR ) );
return *this;
}
- VertexInputBindingDescription & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT
+
+ operator VkDeviceGroupPresentCapabilitiesKHR const&() const VULKAN_HPP_NOEXCEPT
{
- stride = stride_;
+ return *reinterpret_cast<const VkDeviceGroupPresentCapabilitiesKHR*>( this );
+ }
+
+ operator VkDeviceGroupPresentCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DeviceGroupPresentCapabilitiesKHR const& ) const = default;
+#else
+ bool operator==( DeviceGroupPresentCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( presentMask == rhs.presentMask )
+ && ( modes == rhs.modes );
+ }
+
+ bool operator!=( DeviceGroupPresentCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupPresentCapabilitiesKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, VK_MAX_DEVICE_GROUP_SIZE> presentMask = {};
+ VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes = {};
+
+ };
+ static_assert( sizeof( DeviceGroupPresentCapabilitiesKHR ) == sizeof( VkDeviceGroupPresentCapabilitiesKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<DeviceGroupPresentCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eDeviceGroupPresentCapabilitiesKHR>
+ {
+ using Type = DeviceGroupPresentCapabilitiesKHR;
+ };
+
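A sketch of querying DeviceGroupPresentCapabilitiesKHR, assuming VK_KHR_swapchain together with device groups (VK_KHR_device_group or Vulkan 1.1) is enabled; not part of the header. presentMask[i] is the mask of physical devices that can present images owned by device i.

    #include <vulkan/vulkan.hpp>

    // Report whether only local presentation is supported by this device group.
    bool localPresentOnly( vk::Device device )
    {
      vk::DeviceGroupPresentCapabilitiesKHR caps = device.getGroupPresentCapabilitiesKHR();
      return caps.modes == vk::DeviceGroupPresentModeFlagBitsKHR::eLocal;
    }
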
+ struct PhysicalDeviceSurfaceInfo2KHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSurfaceInfo2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceSurfaceInfo2KHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {}) VULKAN_HPP_NOEXCEPT
+ : surface( surface_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceSurfaceInfo2KHR( PhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceSurfaceInfo2KHR( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceSurfaceInfo2KHR & operator=( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR const *>( &rhs );
return *this;
}
- VertexInputBindingDescription & setInputRate( VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceSurfaceInfo2KHR & operator=( PhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- inputRate = inputRate_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceSurfaceInfo2KHR ) );
return *this;
}
- operator VkVertexInputBindingDescription const&() const VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceSurfaceInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkVertexInputBindingDescription*>( this );
+ pNext = pNext_;
+ return *this;
}
- operator VkVertexInputBindingDescription &() VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceSurfaceInfo2KHR & setSurface( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkVertexInputBindingDescription*>( this );
+ surface = surface_;
+ return *this;
}
- bool operator==( VertexInputBindingDescription const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+ operator VkPhysicalDeviceSurfaceInfo2KHR const&() const VULKAN_HPP_NOEXCEPT
{
- return ( binding == rhs.binding )
- && ( stride == rhs.stride )
- && ( inputRate == rhs.inputRate );
+ return *reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( this );
}
- bool operator!=( VertexInputBindingDescription const& rhs ) const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceSurfaceInfo2KHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceSurfaceInfo2KHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceSurfaceInfo2KHR const& ) const = default;
+#else
+ bool operator==( PhysicalDeviceSurfaceInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( surface == rhs.surface );
+ }
+
+ bool operator!=( PhysicalDeviceSurfaceInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- uint32_t binding = {};
- uint32_t stride = {};
- VULKAN_HPP_NAMESPACE::VertexInputRate inputRate = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSurfaceInfo2KHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface = {};
+
};
- static_assert( sizeof( VertexInputBindingDescription ) == sizeof( VkVertexInputBindingDescription ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<VertexInputBindingDescription>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( PhysicalDeviceSurfaceInfo2KHR ) == sizeof( VkPhysicalDeviceSurfaceInfo2KHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceSurfaceInfo2KHR>::value, "struct wrapper is not a standard layout!" );
- struct VertexInputAttributeDescription
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceSurfaceInfo2KHR>
{
- VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription( uint32_t location_ = {},
- uint32_t binding_ = {},
- VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
- uint32_t offset_ = {} ) VULKAN_HPP_NOEXCEPT
- : location( location_ )
- , binding( binding_ )
- , format( format_ )
- , offset( offset_ )
+ using Type = PhysicalDeviceSurfaceInfo2KHR;
+ };
+
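A sketch of PhysicalDeviceSurfaceInfo2KHR in use, assuming VK_KHR_get_surface_capabilities2 is enabled on the instance; the surface is wrapped in the info struct above so that extensions can chain additional query structs via pNext. Not part of the header.

    #include <vulkan/vulkan.hpp>

    // Query the basic surface capabilities through the "2" entry point.
    vk::SurfaceCapabilitiesKHR queryCaps( vk::PhysicalDevice physicalDevice, vk::SurfaceKHR surface )
    {
      vk::SurfaceCapabilities2KHR caps2 =
          physicalDevice.getSurfaceCapabilities2KHR( vk::PhysicalDeviceSurfaceInfo2KHR( surface ) );
      return caps2.surfaceCapabilities;
    }
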
+ struct DeviceMemoryOpaqueCaptureAddressInfo
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceMemoryOpaqueCaptureAddressInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DeviceMemoryOpaqueCaptureAddressInfo(VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}) VULKAN_HPP_NOEXCEPT
+ : memory( memory_ )
{}
- VertexInputAttributeDescription( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR DeviceMemoryOpaqueCaptureAddressInfo( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DeviceMemoryOpaqueCaptureAddressInfo( VkDeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VertexInputAttributeDescription& operator=( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+ DeviceMemoryOpaqueCaptureAddressInfo & operator=( VkDeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo const *>( &rhs );
return *this;
}
- VertexInputAttributeDescription & setLocation( uint32_t location_ ) VULKAN_HPP_NOEXCEPT
+ DeviceMemoryOpaqueCaptureAddressInfo & operator=( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- location = location_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DeviceMemoryOpaqueCaptureAddressInfo ) );
return *this;
}
- VertexInputAttributeDescription & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
+ DeviceMemoryOpaqueCaptureAddressInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- binding = binding_;
+ pNext = pNext_;
return *this;
}
- VertexInputAttributeDescription & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+ DeviceMemoryOpaqueCaptureAddressInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
{
- format = format_;
+ memory = memory_;
return *this;
}
- VertexInputAttributeDescription & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
+
+ operator VkDeviceMemoryOpaqueCaptureAddressInfo const&() const VULKAN_HPP_NOEXCEPT
{
- offset = offset_;
+ return *reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo*>( this );
+ }
+
+ operator VkDeviceMemoryOpaqueCaptureAddressInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkDeviceMemoryOpaqueCaptureAddressInfo*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DeviceMemoryOpaqueCaptureAddressInfo const& ) const = default;
+#else
+ bool operator==( DeviceMemoryOpaqueCaptureAddressInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( memory == rhs.memory );
+ }
+
+ bool operator!=( DeviceMemoryOpaqueCaptureAddressInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryOpaqueCaptureAddressInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+
+ };
+ static_assert( sizeof( DeviceMemoryOpaqueCaptureAddressInfo ) == sizeof( VkDeviceMemoryOpaqueCaptureAddressInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<DeviceMemoryOpaqueCaptureAddressInfo>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eDeviceMemoryOpaqueCaptureAddressInfo>
+ {
+ using Type = DeviceMemoryOpaqueCaptureAddressInfo;
+ };
+ using DeviceMemoryOpaqueCaptureAddressInfoKHR = DeviceMemoryOpaqueCaptureAddressInfo;
+
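A sketch of DeviceMemoryOpaqueCaptureAddressInfo in use, assuming Vulkan 1.2 (or the KHR equivalent) and memory allocated with vk::MemoryAllocateFlagBits::eDeviceAddressCaptureReplay; not part of the header.

    #include <cstdint>
    #include <vulkan/vulkan.hpp>

    // Record the opaque capture address so a capture/replay tool can re-create the
    // allocation at the same address later.
    uint64_t captureAddress( vk::Device device, vk::DeviceMemory memory )
    {
      return device.getMemoryOpaqueCaptureAddress( vk::DeviceMemoryOpaqueCaptureAddressInfo( memory ) );
    }
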
+ struct PresentInfoKHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PresentInfoKHR(uint32_t waitSemaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ = {}, uint32_t swapchainCount_ = {}, const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains_ = {}, const uint32_t* pImageIndices_ = {}, VULKAN_HPP_NAMESPACE::Result* pResults_ = {}) VULKAN_HPP_NOEXCEPT
+ : waitSemaphoreCount( waitSemaphoreCount_ ), pWaitSemaphores( pWaitSemaphores_ ), swapchainCount( swapchainCount_ ), pSwapchains( pSwapchains_ ), pImageIndices( pImageIndices_ ), pResults( pResults_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR PresentInfoKHR( PresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PresentInfoKHR( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ PresentInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & imageIndices_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::Result> const & results_ = {} )
+ : waitSemaphoreCount( static_cast<uint32_t>( waitSemaphores_.size() ) ), pWaitSemaphores( waitSemaphores_.data() ), swapchainCount( static_cast<uint32_t>( swapchains_.size() ) ), pSwapchains( swapchains_.data() ), pImageIndices( imageIndices_.data() ), pResults( results_.data() )
+ {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+ VULKAN_HPP_ASSERT( swapchains_.size() == imageIndices_.size() );
+ VULKAN_HPP_ASSERT( results_.empty() || ( swapchains_.size() == results_.size() ) );
+ VULKAN_HPP_ASSERT( results_.empty() || ( imageIndices_.size() == results_.size() ) );
+#else
+ if ( swapchains_.size() != imageIndices_.size() )
+ {
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::PresentInfoKHR::PresentInfoKHR: swapchains_.size() != imageIndices_.size()" );
+ }
+ if ( !results_.empty() && ( swapchains_.size() != results_.size() ) )
+ {
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::PresentInfoKHR::PresentInfoKHR: !results_.empty() && ( swapchains_.size() != results_.size() )" );
+ }
+ if ( !results_.empty() && ( imageIndices_.size() != results_.size() ) )
+ {
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::PresentInfoKHR::PresentInfoKHR: !results_.empty() && ( imageIndices_.size() != results_.size() )" );
+ }
+#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PresentInfoKHR & operator=( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentInfoKHR const *>( &rhs );
return *this;
}
- operator VkVertexInputAttributeDescription const&() const VULKAN_HPP_NOEXCEPT
+ PresentInfoKHR & operator=( PresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkVertexInputAttributeDescription*>( this );
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PresentInfoKHR ) );
+ return *this;
}
- operator VkVertexInputAttributeDescription &() VULKAN_HPP_NOEXCEPT
+ PresentInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkVertexInputAttributeDescription*>( this );
+ pNext = pNext_;
+ return *this;
}
- bool operator==( VertexInputAttributeDescription const& rhs ) const VULKAN_HPP_NOEXCEPT
+ PresentInfoKHR & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
{
- return ( location == rhs.location )
- && ( binding == rhs.binding )
- && ( format == rhs.format )
- && ( offset == rhs.offset );
+ waitSemaphoreCount = waitSemaphoreCount_;
+ return *this;
}
- bool operator!=( VertexInputAttributeDescription const& rhs ) const VULKAN_HPP_NOEXCEPT
+ PresentInfoKHR & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pWaitSemaphores = pWaitSemaphores_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ PresentInfoKHR & setWaitSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_ ) VULKAN_HPP_NOEXCEPT
+ {
+ waitSemaphoreCount = static_cast<uint32_t>( waitSemaphores_.size() );
+ pWaitSemaphores = waitSemaphores_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+ PresentInfoKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ swapchainCount = swapchainCount_;
+ return *this;
+ }
+
+ PresentInfoKHR & setPSwapchains( const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pSwapchains = pSwapchains_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ PresentInfoKHR & setSwapchains( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains_ ) VULKAN_HPP_NOEXCEPT
+ {
+ swapchainCount = static_cast<uint32_t>( swapchains_.size() );
+ pSwapchains = swapchains_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+ PresentInfoKHR & setPImageIndices( const uint32_t* pImageIndices_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pImageIndices = pImageIndices_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ PresentInfoKHR & setImageIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & imageIndices_ ) VULKAN_HPP_NOEXCEPT
+ {
+ swapchainCount = static_cast<uint32_t>( imageIndices_.size() );
+ pImageIndices = imageIndices_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+ PresentInfoKHR & setPResults( VULKAN_HPP_NAMESPACE::Result* pResults_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pResults = pResults_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ PresentInfoKHR & setResults( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::Result> const & results_ ) VULKAN_HPP_NOEXCEPT
+ {
+ swapchainCount = static_cast<uint32_t>( results_.size() );
+ pResults = results_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+ operator VkPresentInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPresentInfoKHR*>( this );
+ }
+
+ operator VkPresentInfoKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPresentInfoKHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PresentInfoKHR const& ) const = default;
+#else
+ bool operator==( PresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
+ && ( pWaitSemaphores == rhs.pWaitSemaphores )
+ && ( swapchainCount == rhs.swapchainCount )
+ && ( pSwapchains == rhs.pSwapchains )
+ && ( pImageIndices == rhs.pImageIndices )
+ && ( pResults == rhs.pResults );
+ }
+
+ bool operator!=( PresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- uint32_t location = {};
- uint32_t binding = {};
- VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
- uint32_t offset = {};
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentInfoKHR;
+ const void* pNext = {};
+ uint32_t waitSemaphoreCount = {};
+ const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores = {};
+ uint32_t swapchainCount = {};
+ const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains = {};
+ const uint32_t* pImageIndices = {};
+ VULKAN_HPP_NAMESPACE::Result* pResults = {};
+
};
- static_assert( sizeof( VertexInputAttributeDescription ) == sizeof( VkVertexInputAttributeDescription ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<VertexInputAttributeDescription>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( PresentInfoKHR ) == sizeof( VkPresentInfoKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PresentInfoKHR>::value, "struct wrapper is not a standard layout!" );
- struct PipelineVertexInputStateCreateInfo
+ template <>
+ struct CppType<StructureType, StructureType::ePresentInfoKHR>
{
- VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ = {},
- uint32_t vertexBindingDescriptionCount_ = {},
- const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription* pVertexBindingDescriptions_ = {},
- uint32_t vertexAttributeDescriptionCount_ = {},
- const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription* pVertexAttributeDescriptions_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , vertexBindingDescriptionCount( vertexBindingDescriptionCount_ )
- , pVertexBindingDescriptions( pVertexBindingDescriptions_ )
- , vertexAttributeDescriptionCount( vertexAttributeDescriptionCount_ )
- , pVertexAttributeDescriptions( pVertexAttributeDescriptions_ )
+ using Type = PresentInfoKHR;
+ };
+
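A sketch of presenting a single image with PresentInfoKHR (not part of the header). The ArrayProxy constructor above checks that the swapchain and image-index spans have matching sizes, and named lvalues are required because ArrayProxyNoTemporaries rejects temporaries.

    #include <vulkan/vulkan.hpp>

    // Present one image; eSuboptimalKHR also counts as a success code here.
    vk::Result presentOne( vk::Queue queue, vk::Semaphore renderDone,
                           vk::SwapchainKHR swapchain, uint32_t imageIndex )
    {
      vk::PresentInfoKHR presentInfo( renderDone, swapchain, imageIndex );
      return queue.presentKHR( presentInfo );
    }
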
+ struct SubmitInfo
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubmitInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SubmitInfo(uint32_t waitSemaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ = {}, const VULKAN_HPP_NAMESPACE::PipelineStageFlags* pWaitDstStageMask_ = {}, uint32_t commandBufferCount_ = {}, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers_ = {}, uint32_t signalSemaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores_ = {}) VULKAN_HPP_NOEXCEPT
+ : waitSemaphoreCount( waitSemaphoreCount_ ), pWaitSemaphores( pWaitSemaphores_ ), pWaitDstStageMask( pWaitDstStageMask_ ), commandBufferCount( commandBufferCount_ ), pCommandBuffers( pCommandBuffers_ ), signalSemaphoreCount( signalSemaphoreCount_ ), pSignalSemaphores( pSignalSemaphores_ )
{}
- VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR SubmitInfo( SubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ SubmitInfo( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo ) - offsetof( PipelineVertexInputStateCreateInfo, pNext ) );
- return *this;
+ *this = rhs;
}
- PipelineVertexInputStateCreateInfo( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ SubmitInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineStageFlags> const & waitDstStageMask_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & signalSemaphores_ = {} )
+ : waitSemaphoreCount( static_cast<uint32_t>( waitSemaphores_.size() ) ), pWaitSemaphores( waitSemaphores_.data() ), pWaitDstStageMask( waitDstStageMask_.data() ), commandBufferCount( static_cast<uint32_t>( commandBuffers_.size() ) ), pCommandBuffers( commandBuffers_.data() ), signalSemaphoreCount( static_cast<uint32_t>( signalSemaphores_.size() ) ), pSignalSemaphores( signalSemaphores_.data() )
{
- *this = rhs;
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+ VULKAN_HPP_ASSERT( waitSemaphores_.size() == waitDstStageMask_.size() );
+#else
+ if ( waitSemaphores_.size() != waitDstStageMask_.size() )
+ {
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::SubmitInfo::SubmitInfo: waitSemaphores_.size() != waitDstStageMask_.size()" );
+ }
+#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PipelineVertexInputStateCreateInfo& operator=( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ SubmitInfo & operator=( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubmitInfo const *>( &rhs );
return *this;
}
- PipelineVertexInputStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ SubmitInfo & operator=( SubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( SubmitInfo ) );
+ return *this;
+ }
+
+ SubmitInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- PipelineVertexInputStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ SubmitInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
{
- flags = flags_;
+ waitSemaphoreCount = waitSemaphoreCount_;
return *this;
}
- PipelineVertexInputStateCreateInfo & setVertexBindingDescriptionCount( uint32_t vertexBindingDescriptionCount_ ) VULKAN_HPP_NOEXCEPT
+ SubmitInfo & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT
{
- vertexBindingDescriptionCount = vertexBindingDescriptionCount_;
+ pWaitSemaphores = pWaitSemaphores_;
return *this;
}
- PipelineVertexInputStateCreateInfo & setPVertexBindingDescriptions( const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription* pVertexBindingDescriptions_ ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ SubmitInfo & setWaitSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_ ) VULKAN_HPP_NOEXCEPT
{
- pVertexBindingDescriptions = pVertexBindingDescriptions_;
+ waitSemaphoreCount = static_cast<uint32_t>( waitSemaphores_.size() );
+ pWaitSemaphores = waitSemaphores_.data();
return *this;
}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
- PipelineVertexInputStateCreateInfo & setVertexAttributeDescriptionCount( uint32_t vertexAttributeDescriptionCount_ ) VULKAN_HPP_NOEXCEPT
+ SubmitInfo & setPWaitDstStageMask( const VULKAN_HPP_NAMESPACE::PipelineStageFlags* pWaitDstStageMask_ ) VULKAN_HPP_NOEXCEPT
{
- vertexAttributeDescriptionCount = vertexAttributeDescriptionCount_;
+ pWaitDstStageMask = pWaitDstStageMask_;
return *this;
}
- PipelineVertexInputStateCreateInfo & setPVertexAttributeDescriptions( const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription* pVertexAttributeDescriptions_ ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ SubmitInfo & setWaitDstStageMask( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineStageFlags> const & waitDstStageMask_ ) VULKAN_HPP_NOEXCEPT
{
- pVertexAttributeDescriptions = pVertexAttributeDescriptions_;
+ waitSemaphoreCount = static_cast<uint32_t>( waitDstStageMask_.size() );
+ pWaitDstStageMask = waitDstStageMask_.data();
return *this;
}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
- operator VkPipelineVertexInputStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+ SubmitInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPipelineVertexInputStateCreateInfo*>( this );
+ commandBufferCount = commandBufferCount_;
+ return *this;
}
- operator VkPipelineVertexInputStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+ SubmitInfo & setPCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPipelineVertexInputStateCreateInfo*>( this );
+ pCommandBuffers = pCommandBuffers_;
+ return *this;
}
- bool operator==( PipelineVertexInputStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ SubmitInfo & setCommandBuffers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers_ ) VULKAN_HPP_NOEXCEPT
+ {
+ commandBufferCount = static_cast<uint32_t>( commandBuffers_.size() );
+ pCommandBuffers = commandBuffers_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+ SubmitInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ signalSemaphoreCount = signalSemaphoreCount_;
+ return *this;
+ }
+
+ SubmitInfo & setPSignalSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pSignalSemaphores = pSignalSemaphores_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ SubmitInfo & setSignalSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & signalSemaphores_ ) VULKAN_HPP_NOEXCEPT
+ {
+ signalSemaphoreCount = static_cast<uint32_t>( signalSemaphores_.size() );
+ pSignalSemaphores = signalSemaphores_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+ operator VkSubmitInfo const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkSubmitInfo*>( this );
+ }
+
+ operator VkSubmitInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkSubmitInfo*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SubmitInfo const& ) const = default;
+#else
+ bool operator==( SubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( flags == rhs.flags )
- && ( vertexBindingDescriptionCount == rhs.vertexBindingDescriptionCount )
- && ( pVertexBindingDescriptions == rhs.pVertexBindingDescriptions )
- && ( vertexAttributeDescriptionCount == rhs.vertexAttributeDescriptionCount )
- && ( pVertexAttributeDescriptions == rhs.pVertexAttributeDescriptions );
+ && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
+ && ( pWaitSemaphores == rhs.pWaitSemaphores )
+ && ( pWaitDstStageMask == rhs.pWaitDstStageMask )
+ && ( commandBufferCount == rhs.commandBufferCount )
+ && ( pCommandBuffers == rhs.pCommandBuffers )
+ && ( signalSemaphoreCount == rhs.signalSemaphoreCount )
+ && ( pSignalSemaphores == rhs.pSignalSemaphores );
}
- bool operator!=( PipelineVertexInputStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( SubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineVertexInputStateCreateInfo;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubmitInfo;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags = {};
- uint32_t vertexBindingDescriptionCount = {};
- const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription* pVertexBindingDescriptions = {};
- uint32_t vertexAttributeDescriptionCount = {};
- const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription* pVertexAttributeDescriptions = {};
+ uint32_t waitSemaphoreCount = {};
+ const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores = {};
+ const VULKAN_HPP_NAMESPACE::PipelineStageFlags* pWaitDstStageMask = {};
+ uint32_t commandBufferCount = {};
+ const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers = {};
+ uint32_t signalSemaphoreCount = {};
+ const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores = {};
+
};
- static_assert( sizeof( PipelineVertexInputStateCreateInfo ) == sizeof( VkPipelineVertexInputStateCreateInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PipelineVertexInputStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( SubmitInfo ) == sizeof( VkSubmitInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<SubmitInfo>::value, "struct wrapper is not a standard layout!" );
- struct PipelineInputAssemblyStateCreateInfo
+ template <>
+ struct CppType<StructureType, StructureType::eSubmitInfo>
{
- VULKAN_HPP_CONSTEXPR PipelineInputAssemblyStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ = {},
- VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ = VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList,
- VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , topology( topology_ )
- , primitiveRestartEnable( primitiveRestartEnable_ )
+ using Type = SubmitInfo;
+ };
+
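  // Illustrative sketch, not part of the generated header: the ArrayProxyNoTemporaries
  // setter above fills both signalSemaphoreCount and pSignalSemaphores from a single
  // handle or a container, and the conversion operators pass the struct straight to the
  // C entry points (the static_asserts above guarantee identical layout). All names
  // below are hypothetical placeholders.
  void sketchSubmitInfoInterop( VkQueue rawQueue, std::vector<vk::Semaphore> const & signalSemaphores )
  {
    vk::SubmitInfo submitInfo;
    submitInfo.setSignalSemaphores( signalSemaphores );    // count + pointer in one call
    VkSubmitInfo const & cInfo = submitInfo;                // conversion operator defined above
    vkQueueSubmit( rawQueue, 1, &cInfo, VK_NULL_HANDLE );   // plain C API accepts the wrapper as-is
  }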
+ class Queue
+ {
+ public:
+ using CType = VkQueue;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eQueue;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueue;
+
+ public:
+ VULKAN_HPP_CONSTEXPR Queue() VULKAN_HPP_NOEXCEPT
+ : m_queue(VK_NULL_HANDLE)
{}
- VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR Queue( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_queue(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT Queue( VkQueue queue ) VULKAN_HPP_NOEXCEPT
+ : m_queue( queue )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ Queue & operator=(VkQueue queue) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo ) - offsetof( PipelineInputAssemblyStateCreateInfo, pNext ) );
+ m_queue = queue;
return *this;
}
+#endif
- PipelineInputAssemblyStateCreateInfo( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ Queue & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ {
+ m_queue = VK_NULL_HANDLE;
+ return *this;
+ }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( Queue const& ) const = default;
+#else
+ bool operator==( Queue const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_queue == rhs.m_queue;
+ }
+
+ bool operator!=(Queue const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_queue != rhs.m_queue;
+ }
+
+ bool operator<(Queue const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_queue < rhs.m_queue;
+ }
+#endif
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getCheckpointDataNV( uint32_t* pCheckpointDataCount, VULKAN_HPP_NAMESPACE::CheckpointDataNV* pCheckpointData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator = std::allocator<CheckpointDataNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ std::vector<CheckpointDataNV,Allocator> getCheckpointDataNV(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Allocator = std::allocator<CheckpointDataNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, CheckpointDataNV>::value, int>::type = 0>
+ std::vector<CheckpointDataNV,Allocator> getCheckpointDataNV(Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result bindSparse( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindSparseInfo* pBindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindSparse( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> const &bindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void endDebugUtilsLabelEXT(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void endDebugUtilsLabelEXT(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR* pPresentInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result presentKHR( const PresentInfoKHR & presentInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result submit( uint32_t submitCount, const VULKAN_HPP_NAMESPACE::SubmitInfo* pSubmits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type submit( ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> const &submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result waitIdle(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type waitIdle(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueue() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_queue;
+ }
+
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_queue != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_queue == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkQueue m_queue;
+ };
+ static_assert( sizeof( VULKAN_HPP_NAMESPACE::Queue ) == sizeof( VkQueue ), "handle and wrapper have different size!" );
+
+ template <>
+ struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eQueue>
+ {
+ using type = VULKAN_HPP_NAMESPACE::Queue;
+ };
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eQueue>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::Queue;
+ };
+
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueue>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::Queue;
+ };
+
+
+ template <>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Queue>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+ };
+
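  // Usage sketch, not part of the diff: with exceptions enabled, the enhanced-mode Queue
  // overloads declared above return plain values and throw vk::SystemError on failure, so a
  // frame submission reduces to a few lines. The wait and command-buffer setters used here
  // are the generated counterparts of setSignalSemaphores shown earlier; all handles are
  // hypothetical placeholders.
  void sketchQueueSubmit( vk::Queue queue, vk::CommandBuffer cmdBuffer,
                          vk::Semaphore waitSemaphore, vk::Semaphore signalSemaphore, vk::Fence fence )
  {
    vk::PipelineStageFlags waitStage = vk::PipelineStageFlagBits::eColorAttachmentOutput;
    vk::SubmitInfo submitInfo;
    submitInfo.setWaitSemaphores( waitSemaphore )
              .setPWaitDstStageMask( &waitStage )
              .setCommandBuffers( cmdBuffer )
              .setSignalSemaphores( signalSemaphore );
    queue.submit( submitInfo, fence );  // ArrayProxy accepts a single SubmitInfo lvalue
    queue.waitIdle();                   // blocks until the queue has drained
  }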
+ struct DeviceQueueInfo2
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueInfo2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DeviceQueueInfo2(VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {}, uint32_t queueFamilyIndex_ = {}, uint32_t queueIndex_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), queueFamilyIndex( queueFamilyIndex_ ), queueIndex( queueIndex_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR DeviceQueueInfo2( DeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DeviceQueueInfo2( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PipelineInputAssemblyStateCreateInfo& operator=( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ DeviceQueueInfo2 & operator=( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const *>( &rhs );
return *this;
}
- PipelineInputAssemblyStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ DeviceQueueInfo2 & operator=( DeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DeviceQueueInfo2 ) );
+ return *this;
+ }
+
+ DeviceQueueInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- PipelineInputAssemblyStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ DeviceQueueInfo2 & setFlags( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- PipelineInputAssemblyStateCreateInfo & setTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ ) VULKAN_HPP_NOEXCEPT
+ DeviceQueueInfo2 & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
{
- topology = topology_;
+ queueFamilyIndex = queueFamilyIndex_;
return *this;
}
- PipelineInputAssemblyStateCreateInfo & setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ ) VULKAN_HPP_NOEXCEPT
+ DeviceQueueInfo2 & setQueueIndex( uint32_t queueIndex_ ) VULKAN_HPP_NOEXCEPT
{
- primitiveRestartEnable = primitiveRestartEnable_;
+ queueIndex = queueIndex_;
return *this;
}
- operator VkPipelineInputAssemblyStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkDeviceQueueInfo2 const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPipelineInputAssemblyStateCreateInfo*>( this );
+ return *reinterpret_cast<const VkDeviceQueueInfo2*>( this );
}
- operator VkPipelineInputAssemblyStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkDeviceQueueInfo2 &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPipelineInputAssemblyStateCreateInfo*>( this );
+ return *reinterpret_cast<VkDeviceQueueInfo2*>( this );
}
- bool operator==( PipelineInputAssemblyStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DeviceQueueInfo2 const& ) const = default;
+#else
+ bool operator==( DeviceQueueInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
- && ( topology == rhs.topology )
- && ( primitiveRestartEnable == rhs.primitiveRestartEnable );
+ && ( queueFamilyIndex == rhs.queueFamilyIndex )
+ && ( queueIndex == rhs.queueIndex );
}
- bool operator!=( PipelineInputAssemblyStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( DeviceQueueInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineInputAssemblyStateCreateInfo;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueInfo2;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags = {};
- VULKAN_HPP_NAMESPACE::PrimitiveTopology topology = VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList;
- VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable = {};
+ VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags = {};
+ uint32_t queueFamilyIndex = {};
+ uint32_t queueIndex = {};
+
};
- static_assert( sizeof( PipelineInputAssemblyStateCreateInfo ) == sizeof( VkPipelineInputAssemblyStateCreateInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PipelineInputAssemblyStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( DeviceQueueInfo2 ) == sizeof( VkDeviceQueueInfo2 ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<DeviceQueueInfo2>::value, "struct wrapper is not a standard layout!" );
- struct PipelineTessellationStateCreateInfo
+ template <>
+ struct CppType<StructureType, StructureType::eDeviceQueueInfo2>
{
- VULKAN_HPP_CONSTEXPR PipelineTessellationStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ = {},
- uint32_t patchControlPoints_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , patchControlPoints( patchControlPoints_ )
+ using Type = DeviceQueueInfo2;
+ };
+
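  // Illustrative sketch, not part of the generated header: DeviceQueueInfo2 is the input to
  // vkGetDeviceQueue2, needed when a queue was created with non-default DeviceQueueCreateFlags;
  // the vk::Device::getQueue2 wrapper assumed here is generated elsewhere in this header.
  vk::Queue sketchGetProtectedQueue( vk::Device device, uint32_t queueFamilyIndex )
  {
    vk::DeviceQueueInfo2 queueInfo( vk::DeviceQueueCreateFlagBits::eProtected, queueFamilyIndex, 0 );
    return device.getQueue2( queueInfo );
  }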
+ struct FenceGetFdInfoKHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceGetFdInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR FenceGetFdInfoKHR(VULKAN_HPP_NAMESPACE::Fence fence_ = {}, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT
+ : fence( fence_ ), handleType( handleType_ )
{}
- VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR FenceGetFdInfoKHR( FenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ FenceGetFdInfoKHR( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo ) - offsetof( PipelineTessellationStateCreateInfo, pNext ) );
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ FenceGetFdInfoKHR & operator=( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR const *>( &rhs );
return *this;
}
- PipelineTessellationStateCreateInfo( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ FenceGetFdInfoKHR & operator=( FenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( FenceGetFdInfoKHR ) );
+ return *this;
+ }
+
+ FenceGetFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ FenceGetFdInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
+ {
+ fence = fence_;
+ return *this;
+ }
+
+ FenceGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ {
+ handleType = handleType_;
+ return *this;
+ }
+
+
+ operator VkFenceGetFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkFenceGetFdInfoKHR*>( this );
+ }
+
+ operator VkFenceGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkFenceGetFdInfoKHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( FenceGetFdInfoKHR const& ) const = default;
+#else
+ bool operator==( FenceGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( fence == rhs.fence )
+ && ( handleType == rhs.handleType );
+ }
+
+ bool operator!=( FenceGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetFdInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Fence fence = {};
+ VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
+
+ };
+ static_assert( sizeof( FenceGetFdInfoKHR ) == sizeof( VkFenceGetFdInfoKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<FenceGetFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eFenceGetFdInfoKHR>
+ {
+ using Type = FenceGetFdInfoKHR;
+ };
+
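  // Illustrative sketch, not part of the generated header: with VK_KHR_external_fence_fd
  // enabled, this structure is handed to the vk::Device::getFenceFdKHR wrapper (assumed,
  // defined elsewhere in this header) to export a fence payload as a POSIX file descriptor
  // that the caller then owns.
  int sketchExportFenceFd( vk::Device device, vk::Fence fence )
  {
    vk::FenceGetFdInfoKHR getFdInfo( fence, vk::ExternalFenceHandleTypeFlagBits::eSyncFd );
    return device.getFenceFdKHR( getFdInfo );  // throws vk::SystemError on failure
  }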
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ struct FenceGetWin32HandleInfoKHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceGetWin32HandleInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR FenceGetWin32HandleInfoKHR(VULKAN_HPP_NAMESPACE::Fence fence_ = {}, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT
+ : fence( fence_ ), handleType( handleType_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR FenceGetWin32HandleInfoKHR( FenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ FenceGetWin32HandleInfoKHR( VkFenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PipelineTessellationStateCreateInfo& operator=( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ FenceGetWin32HandleInfoKHR & operator=( VkFenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR const *>( &rhs );
return *this;
}
- PipelineTessellationStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ FenceGetWin32HandleInfoKHR & operator=( FenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( FenceGetWin32HandleInfoKHR ) );
+ return *this;
+ }
+
+ FenceGetWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- PipelineTessellationStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ FenceGetWin32HandleInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
{
- flags = flags_;
+ fence = fence_;
return *this;
}
- PipelineTessellationStateCreateInfo & setPatchControlPoints( uint32_t patchControlPoints_ ) VULKAN_HPP_NOEXCEPT
+ FenceGetWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
- patchControlPoints = patchControlPoints_;
+ handleType = handleType_;
return *this;
}
- operator VkPipelineTessellationStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkFenceGetWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPipelineTessellationStateCreateInfo*>( this );
+ return *reinterpret_cast<const VkFenceGetWin32HandleInfoKHR*>( this );
}
- operator VkPipelineTessellationStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkFenceGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPipelineTessellationStateCreateInfo*>( this );
+ return *reinterpret_cast<VkFenceGetWin32HandleInfoKHR*>( this );
}
- bool operator==( PipelineTessellationStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( FenceGetWin32HandleInfoKHR const& ) const = default;
+#else
+ bool operator==( FenceGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( flags == rhs.flags )
- && ( patchControlPoints == rhs.patchControlPoints );
+ && ( fence == rhs.fence )
+ && ( handleType == rhs.handleType );
}
- bool operator!=( PipelineTessellationStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( FenceGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineTessellationStateCreateInfo;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetWin32HandleInfoKHR;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags = {};
- uint32_t patchControlPoints = {};
+ VULKAN_HPP_NAMESPACE::Fence fence = {};
+ VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
+
};
- static_assert( sizeof( PipelineTessellationStateCreateInfo ) == sizeof( VkPipelineTessellationStateCreateInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PipelineTessellationStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( FenceGetWin32HandleInfoKHR ) == sizeof( VkFenceGetWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<FenceGetWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
- struct Viewport
+ template <>
+ struct CppType<StructureType, StructureType::eFenceGetWin32HandleInfoKHR>
{
- VULKAN_HPP_CONSTEXPR Viewport( float x_ = {},
- float y_ = {},
- float width_ = {},
- float height_ = {},
- float minDepth_ = {},
- float maxDepth_ = {} ) VULKAN_HPP_NOEXCEPT
- : x( x_ )
- , y( y_ )
- , width( width_ )
- , height( height_ )
- , minDepth( minDepth_ )
- , maxDepth( maxDepth_ )
+ using Type = FenceGetWin32HandleInfoKHR;
+ };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+ struct GeneratedCommandsMemoryRequirementsInfoNV
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsMemoryRequirementsInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoNV(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ = {}, uint32_t maxSequencesCount_ = {}) VULKAN_HPP_NOEXCEPT
+ : pipelineBindPoint( pipelineBindPoint_ ), pipeline( pipeline_ ), indirectCommandsLayout( indirectCommandsLayout_ ), maxSequencesCount( maxSequencesCount_ )
{}
- Viewport( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoNV( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ GeneratedCommandsMemoryRequirementsInfoNV( VkGeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- Viewport& operator=( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT
+ GeneratedCommandsMemoryRequirementsInfoNV & operator=( VkGeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Viewport const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV const *>( &rhs );
return *this;
}
- Viewport & setX( float x_ ) VULKAN_HPP_NOEXCEPT
+ GeneratedCommandsMemoryRequirementsInfoNV & operator=( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- x = x_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( GeneratedCommandsMemoryRequirementsInfoNV ) );
return *this;
}
- Viewport & setY( float y_ ) VULKAN_HPP_NOEXCEPT
+ GeneratedCommandsMemoryRequirementsInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- y = y_;
+ pNext = pNext_;
return *this;
}
- Viewport & setWidth( float width_ ) VULKAN_HPP_NOEXCEPT
+ GeneratedCommandsMemoryRequirementsInfoNV & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
{
- width = width_;
+ pipelineBindPoint = pipelineBindPoint_;
return *this;
}
- Viewport & setHeight( float height_ ) VULKAN_HPP_NOEXCEPT
+ GeneratedCommandsMemoryRequirementsInfoNV & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
{
- height = height_;
+ pipeline = pipeline_;
return *this;
}
- Viewport & setMinDepth( float minDepth_ ) VULKAN_HPP_NOEXCEPT
+ GeneratedCommandsMemoryRequirementsInfoNV & setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT
{
- minDepth = minDepth_;
+ indirectCommandsLayout = indirectCommandsLayout_;
return *this;
}
- Viewport & setMaxDepth( float maxDepth_ ) VULKAN_HPP_NOEXCEPT
+ GeneratedCommandsMemoryRequirementsInfoNV & setMaxSequencesCount( uint32_t maxSequencesCount_ ) VULKAN_HPP_NOEXCEPT
{
- maxDepth = maxDepth_;
+ maxSequencesCount = maxSequencesCount_;
return *this;
}
- operator VkViewport const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkGeneratedCommandsMemoryRequirementsInfoNV const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkViewport*>( this );
+ return *reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV*>( this );
}
- operator VkViewport &() VULKAN_HPP_NOEXCEPT
+ operator VkGeneratedCommandsMemoryRequirementsInfoNV &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkViewport*>( this );
+ return *reinterpret_cast<VkGeneratedCommandsMemoryRequirementsInfoNV*>( this );
}
- bool operator==( Viewport const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( GeneratedCommandsMemoryRequirementsInfoNV const& ) const = default;
+#else
+ bool operator==( GeneratedCommandsMemoryRequirementsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- return ( x == rhs.x )
- && ( y == rhs.y )
- && ( width == rhs.width )
- && ( height == rhs.height )
- && ( minDepth == rhs.minDepth )
- && ( maxDepth == rhs.maxDepth );
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( pipelineBindPoint == rhs.pipelineBindPoint )
+ && ( pipeline == rhs.pipeline )
+ && ( indirectCommandsLayout == rhs.indirectCommandsLayout )
+ && ( maxSequencesCount == rhs.maxSequencesCount );
}
- bool operator!=( Viewport const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( GeneratedCommandsMemoryRequirementsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- float x = {};
- float y = {};
- float width = {};
- float height = {};
- float minDepth = {};
- float maxDepth = {};
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsMemoryRequirementsInfoNV;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
+ VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
+ VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout = {};
+ uint32_t maxSequencesCount = {};
+
};
- static_assert( sizeof( Viewport ) == sizeof( VkViewport ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<Viewport>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( GeneratedCommandsMemoryRequirementsInfoNV ) == sizeof( VkGeneratedCommandsMemoryRequirementsInfoNV ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<GeneratedCommandsMemoryRequirementsInfoNV>::value, "struct wrapper is not a standard layout!" );
- struct PipelineViewportStateCreateInfo
+ template <>
+ struct CppType<StructureType, StructureType::eGeneratedCommandsMemoryRequirementsInfoNV>
{
- VULKAN_HPP_CONSTEXPR PipelineViewportStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_ = {},
- uint32_t viewportCount_ = {},
- const VULKAN_HPP_NAMESPACE::Viewport* pViewports_ = {},
- uint32_t scissorCount_ = {},
- const VULKAN_HPP_NAMESPACE::Rect2D* pScissors_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , viewportCount( viewportCount_ )
- , pViewports( pViewports_ )
- , scissorCount( scissorCount_ )
- , pScissors( pScissors_ )
+ using Type = GeneratedCommandsMemoryRequirementsInfoNV;
+ };
+
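  // Illustrative sketch, not part of the generated header: under VK_NV_device_generated_commands
  // this structure sizes the preprocess buffer for an indirect commands layout, assuming the
  // matching vk::Device::getGeneratedCommandsMemoryRequirementsNV wrapper is generated elsewhere
  // in this header. Pipeline and layout handles are hypothetical placeholders.
  vk::MemoryRequirements2 sketchPreprocessBufferRequirements( vk::Device device, vk::Pipeline pipeline,
                                                              vk::IndirectCommandsLayoutNV layout,
                                                              uint32_t maxSequences )
  {
    vk::GeneratedCommandsMemoryRequirementsInfoNV info( vk::PipelineBindPoint::eGraphics,
                                                        pipeline, layout, maxSequences );
    return device.getGeneratedCommandsMemoryRequirementsNV( info );
  }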
+ struct ImageDrmFormatModifierPropertiesEXT
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageDrmFormatModifierPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierPropertiesEXT(uint64_t drmFormatModifier_ = {}) VULKAN_HPP_NOEXCEPT
+ : drmFormatModifier( drmFormatModifier_ )
{}
- VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierPropertiesEXT( ImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImageDrmFormatModifierPropertiesEXT( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo ) - offsetof( PipelineViewportStateCreateInfo, pNext ) );
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ ImageDrmFormatModifierPropertiesEXT & operator=( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT const *>( &rhs );
return *this;
}
- PipelineViewportStateCreateInfo( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ ImageDrmFormatModifierPropertiesEXT & operator=( ImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageDrmFormatModifierPropertiesEXT ) );
+ return *this;
+ }
+
+
+ operator VkImageDrmFormatModifierPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkImageDrmFormatModifierPropertiesEXT*>( this );
+ }
+
+ operator VkImageDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ImageDrmFormatModifierPropertiesEXT const& ) const = default;
+#else
+ bool operator==( ImageDrmFormatModifierPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( drmFormatModifier == rhs.drmFormatModifier );
+ }
+
+ bool operator!=( ImageDrmFormatModifierPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierPropertiesEXT;
+ void* pNext = {};
+ uint64_t drmFormatModifier = {};
+
+ };
+ static_assert( sizeof( ImageDrmFormatModifierPropertiesEXT ) == sizeof( VkImageDrmFormatModifierPropertiesEXT ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ImageDrmFormatModifierPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eImageDrmFormatModifierPropertiesEXT>
+ {
+ using Type = ImageDrmFormatModifierPropertiesEXT;
+ };
+
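  // Usage sketch, not part of the diff: this is an output-only structure (note the non-const
  // pNext above); assuming the usual vk::Device::getImageDrmFormatModifierPropertiesEXT wrapper,
  // it reports which DRM format modifier was chosen for an image created with
  // drm-format-modifier tiling.
  uint64_t sketchQueryDrmModifier( vk::Device device, vk::Image image )
  {
    vk::ImageDrmFormatModifierPropertiesEXT props = device.getImageDrmFormatModifierPropertiesEXT( image );
    return props.drmFormatModifier;
  }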
+ struct ImageMemoryRequirementsInfo2
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryRequirementsInfo2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ImageMemoryRequirementsInfo2(VULKAN_HPP_NAMESPACE::Image image_ = {}) VULKAN_HPP_NOEXCEPT
+ : image( image_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR ImageMemoryRequirementsInfo2( ImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImageMemoryRequirementsInfo2( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PipelineViewportStateCreateInfo& operator=( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ ImageMemoryRequirementsInfo2 & operator=( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 const *>( &rhs );
return *this;
}
- PipelineViewportStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ ImageMemoryRequirementsInfo2 & operator=( ImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageMemoryRequirementsInfo2 ) );
+ return *this;
+ }
+
+ ImageMemoryRequirementsInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- PipelineViewportStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ ImageMemoryRequirementsInfo2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
{
- flags = flags_;
+ image = image_;
return *this;
}
- PipelineViewportStateCreateInfo & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT
+
+ operator VkImageMemoryRequirementsInfo2 const&() const VULKAN_HPP_NOEXCEPT
{
- viewportCount = viewportCount_;
+ return *reinterpret_cast<const VkImageMemoryRequirementsInfo2*>( this );
+ }
+
+ operator VkImageMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkImageMemoryRequirementsInfo2*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ImageMemoryRequirementsInfo2 const& ) const = default;
+#else
+ bool operator==( ImageMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( image == rhs.image );
+ }
+
+ bool operator!=( ImageMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryRequirementsInfo2;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Image image = {};
+
+ };
+ static_assert( sizeof( ImageMemoryRequirementsInfo2 ) == sizeof( VkImageMemoryRequirementsInfo2 ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ImageMemoryRequirementsInfo2>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eImageMemoryRequirementsInfo2>
+ {
+ using Type = ImageMemoryRequirementsInfo2;
+ };
+ using ImageMemoryRequirementsInfo2KHR = ImageMemoryRequirementsInfo2;
+
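  // Illustrative sketch, not part of the generated header: the Vulkan 1.1 query path wraps the
  // image handle in this structure so extension inputs can be chained through pNext; the
  // vk::Device::getImageMemoryRequirements2 wrapper assumed here returns the matching
  // MemoryRequirements2 value.
  vk::DeviceSize sketchImageAllocationSize( vk::Device device, vk::Image image )
  {
    vk::MemoryRequirements2 reqs = device.getImageMemoryRequirements2( vk::ImageMemoryRequirementsInfo2( image ) );
    return reqs.memoryRequirements.size;
  }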
+ struct SparseImageFormatProperties
+ {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SparseImageFormatProperties(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, VULKAN_HPP_NAMESPACE::Extent3D imageGranularity_ = {}, VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags_ = {}) VULKAN_HPP_NOEXCEPT
+ : aspectMask( aspectMask_ ), imageGranularity( imageGranularity_ ), flags( flags_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR SparseImageFormatProperties( SparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ SparseImageFormatProperties( VkSparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ SparseImageFormatProperties & operator=( VkSparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties const *>( &rhs );
return *this;
}
- PipelineViewportStateCreateInfo & setPViewports( const VULKAN_HPP_NAMESPACE::Viewport* pViewports_ ) VULKAN_HPP_NOEXCEPT
+ SparseImageFormatProperties & operator=( SparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- pViewports = pViewports_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( SparseImageFormatProperties ) );
return *this;
}
- PipelineViewportStateCreateInfo & setScissorCount( uint32_t scissorCount_ ) VULKAN_HPP_NOEXCEPT
+
+ operator VkSparseImageFormatProperties const&() const VULKAN_HPP_NOEXCEPT
{
- scissorCount = scissorCount_;
+ return *reinterpret_cast<const VkSparseImageFormatProperties*>( this );
+ }
+
+ operator VkSparseImageFormatProperties &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkSparseImageFormatProperties*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SparseImageFormatProperties const& ) const = default;
+#else
+ bool operator==( SparseImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( aspectMask == rhs.aspectMask )
+ && ( imageGranularity == rhs.imageGranularity )
+ && ( flags == rhs.flags );
+ }
+
+ bool operator!=( SparseImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
+ VULKAN_HPP_NAMESPACE::Extent3D imageGranularity = {};
+ VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags = {};
+
+ };
+ static_assert( sizeof( SparseImageFormatProperties ) == sizeof( VkSparseImageFormatProperties ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<SparseImageFormatProperties>::value, "struct wrapper is not a standard layout!" );
+
+ struct SparseImageMemoryRequirements
+ {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements(VULKAN_HPP_NAMESPACE::SparseImageFormatProperties formatProperties_ = {}, uint32_t imageMipTailFirstLod_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailSize_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailStride_ = {}) VULKAN_HPP_NOEXCEPT
+ : formatProperties( formatProperties_ ), imageMipTailFirstLod( imageMipTailFirstLod_ ), imageMipTailSize( imageMipTailSize_ ), imageMipTailOffset( imageMipTailOffset_ ), imageMipTailStride( imageMipTailStride_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements( SparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ SparseImageMemoryRequirements( VkSparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ SparseImageMemoryRequirements & operator=( VkSparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements const *>( &rhs );
return *this;
}
- PipelineViewportStateCreateInfo & setPScissors( const VULKAN_HPP_NAMESPACE::Rect2D* pScissors_ ) VULKAN_HPP_NOEXCEPT
+ SparseImageMemoryRequirements & operator=( SparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
{
- pScissors = pScissors_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( SparseImageMemoryRequirements ) );
return *this;
}
- operator VkPipelineViewportStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkSparseImageMemoryRequirements const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPipelineViewportStateCreateInfo*>( this );
+ return *reinterpret_cast<const VkSparseImageMemoryRequirements*>( this );
}
- operator VkPipelineViewportStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkSparseImageMemoryRequirements &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPipelineViewportStateCreateInfo*>( this );
+ return *reinterpret_cast<VkSparseImageMemoryRequirements*>( this );
}
- bool operator==( PipelineViewportStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SparseImageMemoryRequirements const& ) const = default;
+#else
+ bool operator==( SparseImageMemoryRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( formatProperties == rhs.formatProperties )
+ && ( imageMipTailFirstLod == rhs.imageMipTailFirstLod )
+ && ( imageMipTailSize == rhs.imageMipTailSize )
+ && ( imageMipTailOffset == rhs.imageMipTailOffset )
+ && ( imageMipTailStride == rhs.imageMipTailStride );
+ }
+
+ bool operator!=( SparseImageMemoryRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::SparseImageFormatProperties formatProperties = {};
+ uint32_t imageMipTailFirstLod = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailSize = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailOffset = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailStride = {};
+
+ };
+ static_assert( sizeof( SparseImageMemoryRequirements ) == sizeof( VkSparseImageMemoryRequirements ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<SparseImageMemoryRequirements>::value, "struct wrapper is not a standard layout!" );
+
+ struct ImageSparseMemoryRequirementsInfo2
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageSparseMemoryRequirementsInfo2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ImageSparseMemoryRequirementsInfo2(VULKAN_HPP_NAMESPACE::Image image_ = {}) VULKAN_HPP_NOEXCEPT
+ : image( image_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR ImageSparseMemoryRequirementsInfo2( ImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImageSparseMemoryRequirementsInfo2( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ ImageSparseMemoryRequirementsInfo2 & operator=( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 const *>( &rhs );
+ return *this;
+ }
+
+ ImageSparseMemoryRequirementsInfo2 & operator=( ImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageSparseMemoryRequirementsInfo2 ) );
+ return *this;
+ }
+
+ ImageSparseMemoryRequirementsInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ ImageSparseMemoryRequirementsInfo2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+ {
+ image = image_;
+ return *this;
+ }
+
+
+ operator VkImageSparseMemoryRequirementsInfo2 const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( this );
+ }
+
+ operator VkImageSparseMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkImageSparseMemoryRequirementsInfo2*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ImageSparseMemoryRequirementsInfo2 const& ) const = default;
+#else
+ bool operator==( ImageSparseMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( flags == rhs.flags )
- && ( viewportCount == rhs.viewportCount )
- && ( pViewports == rhs.pViewports )
- && ( scissorCount == rhs.scissorCount )
- && ( pScissors == rhs.pScissors );
+ && ( image == rhs.image );
}
- bool operator!=( PipelineViewportStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( ImageSparseMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportStateCreateInfo;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSparseMemoryRequirementsInfo2;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags = {};
- uint32_t viewportCount = {};
- const VULKAN_HPP_NAMESPACE::Viewport* pViewports = {};
- uint32_t scissorCount = {};
- const VULKAN_HPP_NAMESPACE::Rect2D* pScissors = {};
+ VULKAN_HPP_NAMESPACE::Image image = {};
+
};
- static_assert( sizeof( PipelineViewportStateCreateInfo ) == sizeof( VkPipelineViewportStateCreateInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PipelineViewportStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( ImageSparseMemoryRequirementsInfo2 ) == sizeof( VkImageSparseMemoryRequirementsInfo2 ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ImageSparseMemoryRequirementsInfo2>::value, "struct wrapper is not a standard layout!" );
- struct PipelineRasterizationStateCreateInfo
+ template <>
+ struct CppType<StructureType, StructureType::eImageSparseMemoryRequirementsInfo2>
{
- VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable_ = {},
- VULKAN_HPP_NAMESPACE::PolygonMode polygonMode_ = VULKAN_HPP_NAMESPACE::PolygonMode::eFill,
- VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_ = {},
- VULKAN_HPP_NAMESPACE::FrontFace frontFace_ = VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise,
- VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable_ = {},
- float depthBiasConstantFactor_ = {},
- float depthBiasClamp_ = {},
- float depthBiasSlopeFactor_ = {},
- float lineWidth_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , depthClampEnable( depthClampEnable_ )
- , rasterizerDiscardEnable( rasterizerDiscardEnable_ )
- , polygonMode( polygonMode_ )
- , cullMode( cullMode_ )
- , frontFace( frontFace_ )
- , depthBiasEnable( depthBiasEnable_ )
- , depthBiasConstantFactor( depthBiasConstantFactor_ )
- , depthBiasClamp( depthBiasClamp_ )
- , depthBiasSlopeFactor( depthBiasSlopeFactor_ )
- , lineWidth( lineWidth_ )
+ using Type = ImageSparseMemoryRequirementsInfo2;
+ };
+ using ImageSparseMemoryRequirementsInfo2KHR = ImageSparseMemoryRequirementsInfo2;
+
+ struct SparseImageMemoryRequirements2
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSparseImageMemoryRequirements2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements2(VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements memoryRequirements_ = {}) VULKAN_HPP_NOEXCEPT
+ : memoryRequirements( memoryRequirements_ )
{}
- VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements2( SparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ SparseImageMemoryRequirements2( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo ) - offsetof( PipelineRasterizationStateCreateInfo, pNext ) );
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ SparseImageMemoryRequirements2 & operator=( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 const *>( &rhs );
return *this;
}
- PipelineRasterizationStateCreateInfo( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ SparseImageMemoryRequirements2 & operator=( SparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( SparseImageMemoryRequirements2 ) );
+ return *this;
+ }
+
+
+ operator VkSparseImageMemoryRequirements2 const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkSparseImageMemoryRequirements2*>( this );
+ }
+
+ operator VkSparseImageMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkSparseImageMemoryRequirements2*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SparseImageMemoryRequirements2 const& ) const = default;
+#else
+ bool operator==( SparseImageMemoryRequirements2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( memoryRequirements == rhs.memoryRequirements );
+ }
+
+ bool operator!=( SparseImageMemoryRequirements2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSparseImageMemoryRequirements2;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements memoryRequirements = {};
+
+ };
+ static_assert( sizeof( SparseImageMemoryRequirements2 ) == sizeof( VkSparseImageMemoryRequirements2 ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<SparseImageMemoryRequirements2>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eSparseImageMemoryRequirements2>
+ {
+ using Type = SparseImageMemoryRequirements2;
+ };
+ using SparseImageMemoryRequirements2KHR = SparseImageMemoryRequirements2;
+
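  // Usage sketch, not part of the diff: the enhanced-mode wrapper assumed here performs the
  // usual count-then-fill enumeration of vkGetImageSparseMemoryRequirements2 and returns one
  // SparseImageMemoryRequirements2 per sparse aspect of the image.
  std::vector<vk::SparseImageMemoryRequirements2> sketchSparseRequirements( vk::Device device, vk::Image image )
  {
    return device.getImageSparseMemoryRequirements2( vk::ImageSparseMemoryRequirementsInfo2( image ) );
  }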
+ struct SubresourceLayout
+ {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SubresourceLayout(VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize rowPitch_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize depthPitch_ = {}) VULKAN_HPP_NOEXCEPT
+ : offset( offset_ ), size( size_ ), rowPitch( rowPitch_ ), arrayPitch( arrayPitch_ ), depthPitch( depthPitch_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR SubresourceLayout( SubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ SubresourceLayout( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PipelineRasterizationStateCreateInfo& operator=( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ SubresourceLayout & operator=( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubresourceLayout const *>( &rhs );
return *this;
}
- PipelineRasterizationStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ SubresourceLayout & operator=( SubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( SubresourceLayout ) );
return *this;
}
- PipelineRasterizationStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+
+ operator VkSubresourceLayout const&() const VULKAN_HPP_NOEXCEPT
{
- flags = flags_;
+ return *reinterpret_cast<const VkSubresourceLayout*>( this );
+ }
+
+ operator VkSubresourceLayout &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkSubresourceLayout*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SubresourceLayout const& ) const = default;
+#else
+ bool operator==( SubresourceLayout const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( offset == rhs.offset )
+ && ( size == rhs.size )
+ && ( rowPitch == rhs.rowPitch )
+ && ( arrayPitch == rhs.arrayPitch )
+ && ( depthPitch == rhs.depthPitch );
+ }
+
+ bool operator!=( SubresourceLayout const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize rowPitch = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize depthPitch = {};
+
+ };
+ static_assert( sizeof( SubresourceLayout ) == sizeof( VkSubresourceLayout ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<SubresourceLayout>::value, "struct wrapper is not a standard layout!" );
+
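  // Illustrative sketch, not part of the generated header: for linear-tiling images the
  // vk::Device::getImageSubresourceLayout wrapper (assumed, returns by value) yields this
  // structure, whose offset and pitches are used to address texels through mapped memory.
  vk::SubresourceLayout sketchLinearLayout( vk::Device device, vk::Image linearImage )
  {
    vk::ImageSubresource subresource( vk::ImageAspectFlagBits::eColor, 0 /*mipLevel*/, 0 /*arrayLayer*/ );
    return device.getImageSubresourceLayout( linearImage, subresource );
  }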
+ struct ImageViewAddressPropertiesNVX
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewAddressPropertiesNVX;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ImageViewAddressPropertiesNVX(VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}) VULKAN_HPP_NOEXCEPT
+ : deviceAddress( deviceAddress_ ), size( size_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR ImageViewAddressPropertiesNVX( ImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImageViewAddressPropertiesNVX( VkImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ ImageViewAddressPropertiesNVX & operator=( VkImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX const *>( &rhs );
return *this;
}
- PipelineRasterizationStateCreateInfo & setDepthClampEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable_ ) VULKAN_HPP_NOEXCEPT
+ ImageViewAddressPropertiesNVX & operator=( ImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
{
- depthClampEnable = depthClampEnable_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageViewAddressPropertiesNVX ) );
return *this;
}
- PipelineRasterizationStateCreateInfo & setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable_ ) VULKAN_HPP_NOEXCEPT
+
+ operator VkImageViewAddressPropertiesNVX const&() const VULKAN_HPP_NOEXCEPT
{
- rasterizerDiscardEnable = rasterizerDiscardEnable_;
+ return *reinterpret_cast<const VkImageViewAddressPropertiesNVX*>( this );
+ }
+
+ operator VkImageViewAddressPropertiesNVX &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkImageViewAddressPropertiesNVX*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ImageViewAddressPropertiesNVX const& ) const = default;
+#else
+ bool operator==( ImageViewAddressPropertiesNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( deviceAddress == rhs.deviceAddress )
+ && ( size == rhs.size );
+ }
+
+ bool operator!=( ImageViewAddressPropertiesNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewAddressPropertiesNVX;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+
+ };
+ static_assert( sizeof( ImageViewAddressPropertiesNVX ) == sizeof( VkImageViewAddressPropertiesNVX ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ImageViewAddressPropertiesNVX>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eImageViewAddressPropertiesNVX>
+ {
+ using Type = ImageViewAddressPropertiesNVX;
+ };
+
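  // Illustrative sketch, not part of the generated header: this output structure belongs to
  // VK_NVX_image_view_handle; assuming the matching vk::Device::getImageViewAddressNVX wrapper
  // exists in this header revision, it reports the device address and size backing an image view.
  vk::DeviceAddress sketchImageViewAddress( vk::Device device, vk::ImageView view )
  {
    vk::ImageViewAddressPropertiesNVX props = device.getImageViewAddressNVX( view );
    return props.deviceAddress;
  }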
+ struct ImageViewHandleInfoNVX
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewHandleInfoNVX;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ImageViewHandleInfoNVX(VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, VULKAN_HPP_NAMESPACE::Sampler sampler_ = {}) VULKAN_HPP_NOEXCEPT
+ : imageView( imageView_ ), descriptorType( descriptorType_ ), sampler( sampler_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR ImageViewHandleInfoNVX( ImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImageViewHandleInfoNVX( VkImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ ImageViewHandleInfoNVX & operator=( VkImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX const *>( &rhs );
return *this;
}
- PipelineRasterizationStateCreateInfo & setPolygonMode( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode_ ) VULKAN_HPP_NOEXCEPT
+ ImageViewHandleInfoNVX & operator=( ImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
{
- polygonMode = polygonMode_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageViewHandleInfoNVX ) );
return *this;
}
- PipelineRasterizationStateCreateInfo & setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_ ) VULKAN_HPP_NOEXCEPT
+ ImageViewHandleInfoNVX & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- cullMode = cullMode_;
+ pNext = pNext_;
return *this;
}
- PipelineRasterizationStateCreateInfo & setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace_ ) VULKAN_HPP_NOEXCEPT
+ ImageViewHandleInfoNVX & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
{
- frontFace = frontFace_;
+ imageView = imageView_;
return *this;
}
- PipelineRasterizationStateCreateInfo & setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable_ ) VULKAN_HPP_NOEXCEPT
+ ImageViewHandleInfoNVX & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
{
- depthBiasEnable = depthBiasEnable_;
+ descriptorType = descriptorType_;
return *this;
}
- PipelineRasterizationStateCreateInfo & setDepthBiasConstantFactor( float depthBiasConstantFactor_ ) VULKAN_HPP_NOEXCEPT
+ ImageViewHandleInfoNVX & setSampler( VULKAN_HPP_NAMESPACE::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT
{
- depthBiasConstantFactor = depthBiasConstantFactor_;
+ sampler = sampler_;
return *this;
}
- PipelineRasterizationStateCreateInfo & setDepthBiasClamp( float depthBiasClamp_ ) VULKAN_HPP_NOEXCEPT
+
+ operator VkImageViewHandleInfoNVX const&() const VULKAN_HPP_NOEXCEPT
{
- depthBiasClamp = depthBiasClamp_;
+ return *reinterpret_cast<const VkImageViewHandleInfoNVX*>( this );
+ }
+
+ operator VkImageViewHandleInfoNVX &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkImageViewHandleInfoNVX*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ImageViewHandleInfoNVX const& ) const = default;
+#else
+ bool operator==( ImageViewHandleInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( imageView == rhs.imageView )
+ && ( descriptorType == rhs.descriptorType )
+ && ( sampler == rhs.sampler );
+ }
+
+ bool operator!=( ImageViewHandleInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewHandleInfoNVX;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ImageView imageView = {};
+ VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
+ VULKAN_HPP_NAMESPACE::Sampler sampler = {};
+
+ };
+ static_assert( sizeof( ImageViewHandleInfoNVX ) == sizeof( VkImageViewHandleInfoNVX ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ImageViewHandleInfoNVX>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eImageViewHandleInfoNVX>
+ {
+ using Type = ImageViewHandleInfoNVX;
+ };
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+ struct MemoryGetAndroidHardwareBufferInfoANDROID
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR MemoryGetAndroidHardwareBufferInfoANDROID(VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}) VULKAN_HPP_NOEXCEPT
+ : memory( memory_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR MemoryGetAndroidHardwareBufferInfoANDROID( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ MemoryGetAndroidHardwareBufferInfoANDROID( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ MemoryGetAndroidHardwareBufferInfoANDROID & operator=( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID const *>( &rhs );
return *this;
}
- PipelineRasterizationStateCreateInfo & setDepthBiasSlopeFactor( float depthBiasSlopeFactor_ ) VULKAN_HPP_NOEXCEPT
+ MemoryGetAndroidHardwareBufferInfoANDROID & operator=( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
{
- depthBiasSlopeFactor = depthBiasSlopeFactor_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( MemoryGetAndroidHardwareBufferInfoANDROID ) );
return *this;
}
- PipelineRasterizationStateCreateInfo & setLineWidth( float lineWidth_ ) VULKAN_HPP_NOEXCEPT
+ MemoryGetAndroidHardwareBufferInfoANDROID & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- lineWidth = lineWidth_;
+ pNext = pNext_;
return *this;
}
- operator VkPipelineRasterizationStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+ MemoryGetAndroidHardwareBufferInfoANDROID & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPipelineRasterizationStateCreateInfo*>( this );
+ memory = memory_;
+ return *this;
}
- operator VkPipelineRasterizationStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+
+ operator VkMemoryGetAndroidHardwareBufferInfoANDROID const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPipelineRasterizationStateCreateInfo*>( this );
+ return *reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID*>( this );
}
- bool operator==( PipelineRasterizationStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ operator VkMemoryGetAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkMemoryGetAndroidHardwareBufferInfoANDROID*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( MemoryGetAndroidHardwareBufferInfoANDROID const& ) const = default;
+#else
+ bool operator==( MemoryGetAndroidHardwareBufferInfoANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( flags == rhs.flags )
- && ( depthClampEnable == rhs.depthClampEnable )
- && ( rasterizerDiscardEnable == rhs.rasterizerDiscardEnable )
- && ( polygonMode == rhs.polygonMode )
- && ( cullMode == rhs.cullMode )
- && ( frontFace == rhs.frontFace )
- && ( depthBiasEnable == rhs.depthBiasEnable )
- && ( depthBiasConstantFactor == rhs.depthBiasConstantFactor )
- && ( depthBiasClamp == rhs.depthBiasClamp )
- && ( depthBiasSlopeFactor == rhs.depthBiasSlopeFactor )
- && ( lineWidth == rhs.lineWidth );
+ && ( memory == rhs.memory );
}
- bool operator!=( PipelineRasterizationStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( MemoryGetAndroidHardwareBufferInfoANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateCreateInfo;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags = {};
- VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable = {};
- VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable = {};
- VULKAN_HPP_NAMESPACE::PolygonMode polygonMode = VULKAN_HPP_NAMESPACE::PolygonMode::eFill;
- VULKAN_HPP_NAMESPACE::CullModeFlags cullMode = {};
- VULKAN_HPP_NAMESPACE::FrontFace frontFace = VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise;
- VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable = {};
- float depthBiasConstantFactor = {};
- float depthBiasClamp = {};
- float depthBiasSlopeFactor = {};
- float lineWidth = {};
+ VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+
};
- static_assert( sizeof( PipelineRasterizationStateCreateInfo ) == sizeof( VkPipelineRasterizationStateCreateInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PipelineRasterizationStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( MemoryGetAndroidHardwareBufferInfoANDROID ) == sizeof( VkMemoryGetAndroidHardwareBufferInfoANDROID ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<MemoryGetAndroidHardwareBufferInfoANDROID>::value, "struct wrapper is not a standard layout!" );
- struct PipelineMultisampleStateCreateInfo
+ template <>
+ struct CppType<StructureType, StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID>
+ {
+ using Type = MemoryGetAndroidHardwareBufferInfoANDROID;
+ };
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+ struct MemoryGetFdInfoKHR
{
- VULKAN_HPP_CONSTEXPR PipelineMultisampleStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_ = {},
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
- VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable_ = {},
- float minSampleShading_ = {},
- const VULKAN_HPP_NAMESPACE::SampleMask* pSampleMask_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , rasterizationSamples( rasterizationSamples_ )
- , sampleShadingEnable( sampleShadingEnable_ )
- , minSampleShading( minSampleShading_ )
- , pSampleMask( pSampleMask_ )
- , alphaToCoverageEnable( alphaToCoverageEnable_ )
- , alphaToOneEnable( alphaToOneEnable_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetFdInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR MemoryGetFdInfoKHR(VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT
+ : memory( memory_ ), handleType( handleType_ )
{}
- VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR MemoryGetFdInfoKHR( MemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ MemoryGetFdInfoKHR( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo ) - offsetof( PipelineMultisampleStateCreateInfo, pNext ) );
- return *this;
+ *this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PipelineMultisampleStateCreateInfo( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ MemoryGetFdInfoKHR & operator=( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR const *>( &rhs );
+ return *this;
}
- PipelineMultisampleStateCreateInfo& operator=( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ MemoryGetFdInfoKHR & operator=( MemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( MemoryGetFdInfoKHR ) );
return *this;
}
- PipelineMultisampleStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ MemoryGetFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- PipelineMultisampleStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ MemoryGetFdInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
{
- flags = flags_;
+ memory = memory_;
return *this;
}
- PipelineMultisampleStateCreateInfo & setRasterizationSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ ) VULKAN_HPP_NOEXCEPT
+ MemoryGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
- rasterizationSamples = rasterizationSamples_;
+ handleType = handleType_;
return *this;
}
- PipelineMultisampleStateCreateInfo & setSampleShadingEnable( VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable_ ) VULKAN_HPP_NOEXCEPT
+
+ operator VkMemoryGetFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT
{
- sampleShadingEnable = sampleShadingEnable_;
- return *this;
+ return *reinterpret_cast<const VkMemoryGetFdInfoKHR*>( this );
}
- PipelineMultisampleStateCreateInfo & setMinSampleShading( float minSampleShading_ ) VULKAN_HPP_NOEXCEPT
+ operator VkMemoryGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT
{
- minSampleShading = minSampleShading_;
+ return *reinterpret_cast<VkMemoryGetFdInfoKHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( MemoryGetFdInfoKHR const& ) const = default;
+#else
+ bool operator==( MemoryGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( memory == rhs.memory )
+ && ( handleType == rhs.handleType );
+ }
+
+ bool operator!=( MemoryGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetFdInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
+
+ };
+ static_assert( sizeof( MemoryGetFdInfoKHR ) == sizeof( VkMemoryGetFdInfoKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<MemoryGetFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eMemoryGetFdInfoKHR>
+ {
+ using Type = MemoryGetFdInfoKHR;
+ };
+
+ struct MemoryFdPropertiesKHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryFdPropertiesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR MemoryFdPropertiesKHR(uint32_t memoryTypeBits_ = {}) VULKAN_HPP_NOEXCEPT
+ : memoryTypeBits( memoryTypeBits_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR MemoryFdPropertiesKHR( MemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ MemoryFdPropertiesKHR( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ MemoryFdPropertiesKHR & operator=( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR const *>( &rhs );
return *this;
}
- PipelineMultisampleStateCreateInfo & setPSampleMask( const VULKAN_HPP_NAMESPACE::SampleMask* pSampleMask_ ) VULKAN_HPP_NOEXCEPT
+ MemoryFdPropertiesKHR & operator=( MemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- pSampleMask = pSampleMask_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( MemoryFdPropertiesKHR ) );
return *this;
}
- PipelineMultisampleStateCreateInfo & setAlphaToCoverageEnable( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_ ) VULKAN_HPP_NOEXCEPT
+
+ operator VkMemoryFdPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
{
- alphaToCoverageEnable = alphaToCoverageEnable_;
+ return *reinterpret_cast<const VkMemoryFdPropertiesKHR*>( this );
+ }
+
+ operator VkMemoryFdPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkMemoryFdPropertiesKHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( MemoryFdPropertiesKHR const& ) const = default;
+#else
+ bool operator==( MemoryFdPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( memoryTypeBits == rhs.memoryTypeBits );
+ }
+
+ bool operator!=( MemoryFdPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryFdPropertiesKHR;
+ void* pNext = {};
+ uint32_t memoryTypeBits = {};
+
+ };
+ static_assert( sizeof( MemoryFdPropertiesKHR ) == sizeof( VkMemoryFdPropertiesKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<MemoryFdPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eMemoryFdPropertiesKHR>
+ {
+ using Type = MemoryFdPropertiesKHR;
+ };
+
+ struct MemoryHostPointerPropertiesEXT
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryHostPointerPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR MemoryHostPointerPropertiesEXT(uint32_t memoryTypeBits_ = {}) VULKAN_HPP_NOEXCEPT
+ : memoryTypeBits( memoryTypeBits_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR MemoryHostPointerPropertiesEXT( MemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ MemoryHostPointerPropertiesEXT( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ MemoryHostPointerPropertiesEXT & operator=( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT const *>( &rhs );
return *this;
}
- PipelineMultisampleStateCreateInfo & setAlphaToOneEnable( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_ ) VULKAN_HPP_NOEXCEPT
+ MemoryHostPointerPropertiesEXT & operator=( MemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- alphaToOneEnable = alphaToOneEnable_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( MemoryHostPointerPropertiesEXT ) );
return *this;
}
- operator VkPipelineMultisampleStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkMemoryHostPointerPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPipelineMultisampleStateCreateInfo*>( this );
+ return *reinterpret_cast<const VkMemoryHostPointerPropertiesEXT*>( this );
}
- operator VkPipelineMultisampleStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkMemoryHostPointerPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPipelineMultisampleStateCreateInfo*>( this );
+ return *reinterpret_cast<VkMemoryHostPointerPropertiesEXT*>( this );
}
- bool operator==( PipelineMultisampleStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( MemoryHostPointerPropertiesEXT const& ) const = default;
+#else
+ bool operator==( MemoryHostPointerPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( flags == rhs.flags )
- && ( rasterizationSamples == rhs.rasterizationSamples )
- && ( sampleShadingEnable == rhs.sampleShadingEnable )
- && ( minSampleShading == rhs.minSampleShading )
- && ( pSampleMask == rhs.pSampleMask )
- && ( alphaToCoverageEnable == rhs.alphaToCoverageEnable )
- && ( alphaToOneEnable == rhs.alphaToOneEnable );
+ && ( memoryTypeBits == rhs.memoryTypeBits );
}
- bool operator!=( PipelineMultisampleStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( MemoryHostPointerPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineMultisampleStateCreateInfo;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags = {};
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
- VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable = {};
- float minSampleShading = {};
- const VULKAN_HPP_NAMESPACE::SampleMask* pSampleMask = {};
- VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable = {};
- VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable = {};
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryHostPointerPropertiesEXT;
+ void* pNext = {};
+ uint32_t memoryTypeBits = {};
+
};
- static_assert( sizeof( PipelineMultisampleStateCreateInfo ) == sizeof( VkPipelineMultisampleStateCreateInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PipelineMultisampleStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( MemoryHostPointerPropertiesEXT ) == sizeof( VkMemoryHostPointerPropertiesEXT ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<MemoryHostPointerPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
- struct StencilOpState
+ template <>
+ struct CppType<StructureType, StructureType::eMemoryHostPointerPropertiesEXT>
{
- VULKAN_HPP_CONSTEXPR StencilOpState( VULKAN_HPP_NAMESPACE::StencilOp failOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep,
- VULKAN_HPP_NAMESPACE::StencilOp passOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep,
- VULKAN_HPP_NAMESPACE::StencilOp depthFailOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep,
- VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever,
- uint32_t compareMask_ = {},
- uint32_t writeMask_ = {},
- uint32_t reference_ = {} ) VULKAN_HPP_NOEXCEPT
- : failOp( failOp_ )
- , passOp( passOp_ )
- , depthFailOp( depthFailOp_ )
- , compareOp( compareOp_ )
- , compareMask( compareMask_ )
- , writeMask( writeMask_ )
- , reference( reference_ )
+ using Type = MemoryHostPointerPropertiesEXT;
+ };
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ struct MemoryGetWin32HandleInfoKHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetWin32HandleInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR MemoryGetWin32HandleInfoKHR(VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT
+ : memory( memory_ ), handleType( handleType_ )
{}
- StencilOpState( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR MemoryGetWin32HandleInfoKHR( MemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ MemoryGetWin32HandleInfoKHR( VkMemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- StencilOpState& operator=( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT
+ MemoryGetWin32HandleInfoKHR & operator=( VkMemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::StencilOpState const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR const *>( &rhs );
return *this;
}
- StencilOpState & setFailOp( VULKAN_HPP_NAMESPACE::StencilOp failOp_ ) VULKAN_HPP_NOEXCEPT
+ MemoryGetWin32HandleInfoKHR & operator=( MemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- failOp = failOp_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( MemoryGetWin32HandleInfoKHR ) );
return *this;
}
- StencilOpState & setPassOp( VULKAN_HPP_NAMESPACE::StencilOp passOp_ ) VULKAN_HPP_NOEXCEPT
+ MemoryGetWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- passOp = passOp_;
+ pNext = pNext_;
return *this;
}
- StencilOpState & setDepthFailOp( VULKAN_HPP_NAMESPACE::StencilOp depthFailOp_ ) VULKAN_HPP_NOEXCEPT
+ MemoryGetWin32HandleInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
{
- depthFailOp = depthFailOp_;
+ memory = memory_;
return *this;
}
- StencilOpState & setCompareOp( VULKAN_HPP_NAMESPACE::CompareOp compareOp_ ) VULKAN_HPP_NOEXCEPT
+ MemoryGetWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
- compareOp = compareOp_;
+ handleType = handleType_;
return *this;
}
- StencilOpState & setCompareMask( uint32_t compareMask_ ) VULKAN_HPP_NOEXCEPT
+
+ operator VkMemoryGetWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT
{
- compareMask = compareMask_;
+ return *reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR*>( this );
+ }
+
+ operator VkMemoryGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkMemoryGetWin32HandleInfoKHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( MemoryGetWin32HandleInfoKHR const& ) const = default;
+#else
+ bool operator==( MemoryGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( memory == rhs.memory )
+ && ( handleType == rhs.handleType );
+ }
+
+ bool operator!=( MemoryGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetWin32HandleInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
+
+ };
+ static_assert( sizeof( MemoryGetWin32HandleInfoKHR ) == sizeof( VkMemoryGetWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<MemoryGetWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eMemoryGetWin32HandleInfoKHR>
+ {
+ using Type = MemoryGetWin32HandleInfoKHR;
+ };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ struct MemoryWin32HandlePropertiesKHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryWin32HandlePropertiesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR MemoryWin32HandlePropertiesKHR(uint32_t memoryTypeBits_ = {}) VULKAN_HPP_NOEXCEPT
+ : memoryTypeBits( memoryTypeBits_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR MemoryWin32HandlePropertiesKHR( MemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ MemoryWin32HandlePropertiesKHR( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ MemoryWin32HandlePropertiesKHR & operator=( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR const *>( &rhs );
return *this;
}
- StencilOpState & setWriteMask( uint32_t writeMask_ ) VULKAN_HPP_NOEXCEPT
+ MemoryWin32HandlePropertiesKHR & operator=( MemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- writeMask = writeMask_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( MemoryWin32HandlePropertiesKHR ) );
return *this;
}
- StencilOpState & setReference( uint32_t reference_ ) VULKAN_HPP_NOEXCEPT
+
+ operator VkMemoryWin32HandlePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
{
- reference = reference_;
+ return *reinterpret_cast<const VkMemoryWin32HandlePropertiesKHR*>( this );
+ }
+
+ operator VkMemoryWin32HandlePropertiesKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkMemoryWin32HandlePropertiesKHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( MemoryWin32HandlePropertiesKHR const& ) const = default;
+#else
+ bool operator==( MemoryWin32HandlePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( memoryTypeBits == rhs.memoryTypeBits );
+ }
+
+ bool operator!=( MemoryWin32HandlePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryWin32HandlePropertiesKHR;
+ void* pNext = {};
+ uint32_t memoryTypeBits = {};
+
+ };
+ static_assert( sizeof( MemoryWin32HandlePropertiesKHR ) == sizeof( VkMemoryWin32HandlePropertiesKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<MemoryWin32HandlePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eMemoryWin32HandlePropertiesKHR>
+ {
+ using Type = MemoryWin32HandlePropertiesKHR;
+ };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+ struct PastPresentationTimingGOOGLE
+ {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PastPresentationTimingGOOGLE(uint32_t presentID_ = {}, uint64_t desiredPresentTime_ = {}, uint64_t actualPresentTime_ = {}, uint64_t earliestPresentTime_ = {}, uint64_t presentMargin_ = {}) VULKAN_HPP_NOEXCEPT
+ : presentID( presentID_ ), desiredPresentTime( desiredPresentTime_ ), actualPresentTime( actualPresentTime_ ), earliestPresentTime( earliestPresentTime_ ), presentMargin( presentMargin_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR PastPresentationTimingGOOGLE( PastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PastPresentationTimingGOOGLE( VkPastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PastPresentationTimingGOOGLE & operator=( VkPastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE const *>( &rhs );
return *this;
}
- operator VkStencilOpState const&() const VULKAN_HPP_NOEXCEPT
+ PastPresentationTimingGOOGLE & operator=( PastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkStencilOpState*>( this );
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PastPresentationTimingGOOGLE ) );
+ return *this;
}
- operator VkStencilOpState &() VULKAN_HPP_NOEXCEPT
+
+ operator VkPastPresentationTimingGOOGLE const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkStencilOpState*>( this );
+ return *reinterpret_cast<const VkPastPresentationTimingGOOGLE*>( this );
}
- bool operator==( StencilOpState const& rhs ) const VULKAN_HPP_NOEXCEPT
+ operator VkPastPresentationTimingGOOGLE &() VULKAN_HPP_NOEXCEPT
{
- return ( failOp == rhs.failOp )
- && ( passOp == rhs.passOp )
- && ( depthFailOp == rhs.depthFailOp )
- && ( compareOp == rhs.compareOp )
- && ( compareMask == rhs.compareMask )
- && ( writeMask == rhs.writeMask )
- && ( reference == rhs.reference );
+ return *reinterpret_cast<VkPastPresentationTimingGOOGLE*>( this );
}
- bool operator!=( StencilOpState const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PastPresentationTimingGOOGLE const& ) const = default;
+#else
+ bool operator==( PastPresentationTimingGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( presentID == rhs.presentID )
+ && ( desiredPresentTime == rhs.desiredPresentTime )
+ && ( actualPresentTime == rhs.actualPresentTime )
+ && ( earliestPresentTime == rhs.earliestPresentTime )
+ && ( presentMargin == rhs.presentMargin );
+ }
+
+ bool operator!=( PastPresentationTimingGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- VULKAN_HPP_NAMESPACE::StencilOp failOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep;
- VULKAN_HPP_NAMESPACE::StencilOp passOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep;
- VULKAN_HPP_NAMESPACE::StencilOp depthFailOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep;
- VULKAN_HPP_NAMESPACE::CompareOp compareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever;
- uint32_t compareMask = {};
- uint32_t writeMask = {};
- uint32_t reference = {};
+ uint32_t presentID = {};
+ uint64_t desiredPresentTime = {};
+ uint64_t actualPresentTime = {};
+ uint64_t earliestPresentTime = {};
+ uint64_t presentMargin = {};
+
};
- static_assert( sizeof( StencilOpState ) == sizeof( VkStencilOpState ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<StencilOpState>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( PastPresentationTimingGOOGLE ) == sizeof( VkPastPresentationTimingGOOGLE ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PastPresentationTimingGOOGLE>::value, "struct wrapper is not a standard layout!" );
- struct PipelineDepthStencilStateCreateInfo
+ union PerformanceValueDataINTEL
{
- VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable_ = {},
- VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever,
- VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable_ = {},
- VULKAN_HPP_NAMESPACE::StencilOpState front_ = {},
- VULKAN_HPP_NAMESPACE::StencilOpState back_ = {},
- float minDepthBounds_ = {},
- float maxDepthBounds_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , depthTestEnable( depthTestEnable_ )
- , depthWriteEnable( depthWriteEnable_ )
- , depthCompareOp( depthCompareOp_ )
- , depthBoundsTestEnable( depthBoundsTestEnable_ )
- , stencilTestEnable( stencilTestEnable_ )
- , front( front_ )
- , back( back_ )
- , minDepthBounds( minDepthBounds_ )
- , maxDepthBounds( maxDepthBounds_ )
+ PerformanceValueDataINTEL( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL const& rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL ) );
+ }
+
+ PerformanceValueDataINTEL( uint32_t value32_ = {} )
+ : value32( value32_ )
+ {}
+
+ PerformanceValueDataINTEL( uint64_t value64_ )
+ : value64( value64_ )
{}
- VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ PerformanceValueDataINTEL( float valueFloat_ )
+ : valueFloat( valueFloat_ )
+ {}
+
+ PerformanceValueDataINTEL( const char* valueString_ )
+ : valueString( valueString_ )
+ {}
+
+ PerformanceValueDataINTEL & setValue32( uint32_t value32_ ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo ) - offsetof( PipelineDepthStencilStateCreateInfo, pNext ) );
+ value32 = value32_;
return *this;
}
- PipelineDepthStencilStateCreateInfo( VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ PerformanceValueDataINTEL & setValue64( uint64_t value64_ ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ value64 = value64_;
+ return *this;
}
- PipelineDepthStencilStateCreateInfo& operator=( VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ PerformanceValueDataINTEL & setValueFloat( float valueFloat_ ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo const *>(&rhs);
+ valueFloat = valueFloat_;
return *this;
}
- PipelineDepthStencilStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ PerformanceValueDataINTEL & setValueBool( VULKAN_HPP_NAMESPACE::Bool32 valueBool_ ) VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ valueBool = valueBool_;
return *this;
}
- PipelineDepthStencilStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ PerformanceValueDataINTEL & setValueString( const char* valueString_ ) VULKAN_HPP_NOEXCEPT
{
- flags = flags_;
+ valueString = valueString_;
return *this;
}
- PipelineDepthStencilStateCreateInfo & setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL & operator=( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
{
- depthTestEnable = depthTestEnable_;
+ memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL ) );
return *this;
}
- PipelineDepthStencilStateCreateInfo & setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable_ ) VULKAN_HPP_NOEXCEPT
+ operator VkPerformanceValueDataINTEL const&() const
{
- depthWriteEnable = depthWriteEnable_;
+ return *reinterpret_cast<const VkPerformanceValueDataINTEL*>(this);
+ }
+
+ operator VkPerformanceValueDataINTEL &()
+ {
+ return *reinterpret_cast<VkPerformanceValueDataINTEL*>(this);
+ }
+
+#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+ uint32_t value32;
+ uint64_t value64;
+ float valueFloat;
+ VULKAN_HPP_NAMESPACE::Bool32 valueBool;
+ const char* valueString;
+#else
+ uint32_t value32;
+ uint64_t value64;
+ float valueFloat;
+ VkBool32 valueBool;
+ const char* valueString;
+#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
+ };
+
+ struct PerformanceValueINTEL
+ {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ PerformanceValueINTEL(VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL::eUint32, VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data_ = {}) VULKAN_HPP_NOEXCEPT
+ : type( type_ ), data( data_ )
+ {}
+
+ PerformanceValueINTEL( PerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PerformanceValueINTEL( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PerformanceValueINTEL & operator=( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL const *>( &rhs );
return *this;
}
- PipelineDepthStencilStateCreateInfo & setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp_ ) VULKAN_HPP_NOEXCEPT
+ PerformanceValueINTEL & operator=( PerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
{
- depthCompareOp = depthCompareOp_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PerformanceValueINTEL ) );
return *this;
}
- PipelineDepthStencilStateCreateInfo & setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable_ ) VULKAN_HPP_NOEXCEPT
+ PerformanceValueINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT
{
- depthBoundsTestEnable = depthBoundsTestEnable_;
+ type = type_;
return *this;
}
- PipelineDepthStencilStateCreateInfo & setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable_ ) VULKAN_HPP_NOEXCEPT
+ PerformanceValueINTEL & setData( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL const & data_ ) VULKAN_HPP_NOEXCEPT
{
- stencilTestEnable = stencilTestEnable_;
+ data = data_;
return *this;
}
- PipelineDepthStencilStateCreateInfo & setFront( VULKAN_HPP_NAMESPACE::StencilOpState front_ ) VULKAN_HPP_NOEXCEPT
+
+ operator VkPerformanceValueINTEL const&() const VULKAN_HPP_NOEXCEPT
{
- front = front_;
+ return *reinterpret_cast<const VkPerformanceValueINTEL*>( this );
+ }
+
+ operator VkPerformanceValueINTEL &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPerformanceValueINTEL*>( this );
+ }
+
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL::eUint32;
+ VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data = {};
+
+ };
+ static_assert( sizeof( PerformanceValueINTEL ) == sizeof( VkPerformanceValueINTEL ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PerformanceValueINTEL>::value, "struct wrapper is not a standard layout!" );
+
+ struct PipelineExecutableInfoKHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutableInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PipelineExecutableInfoKHR(VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, uint32_t executableIndex_ = {}) VULKAN_HPP_NOEXCEPT
+ : pipeline( pipeline_ ), executableIndex( executableIndex_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR PipelineExecutableInfoKHR( PipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PipelineExecutableInfoKHR( VkPipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PipelineExecutableInfoKHR & operator=( VkPipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR const *>( &rhs );
return *this;
}
- PipelineDepthStencilStateCreateInfo & setBack( VULKAN_HPP_NAMESPACE::StencilOpState back_ ) VULKAN_HPP_NOEXCEPT
+ PipelineExecutableInfoKHR & operator=( PipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- back = back_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineExecutableInfoKHR ) );
return *this;
}
- PipelineDepthStencilStateCreateInfo & setMinDepthBounds( float minDepthBounds_ ) VULKAN_HPP_NOEXCEPT
+ PipelineExecutableInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- minDepthBounds = minDepthBounds_;
+ pNext = pNext_;
return *this;
}
- PipelineDepthStencilStateCreateInfo & setMaxDepthBounds( float maxDepthBounds_ ) VULKAN_HPP_NOEXCEPT
+ PipelineExecutableInfoKHR & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
{
- maxDepthBounds = maxDepthBounds_;
+ pipeline = pipeline_;
return *this;
}
- operator VkPipelineDepthStencilStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+ PipelineExecutableInfoKHR & setExecutableIndex( uint32_t executableIndex_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPipelineDepthStencilStateCreateInfo*>( this );
+ executableIndex = executableIndex_;
+ return *this;
}
- operator VkPipelineDepthStencilStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+
+ operator VkPipelineExecutableInfoKHR const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPipelineDepthStencilStateCreateInfo*>( this );
+ return *reinterpret_cast<const VkPipelineExecutableInfoKHR*>( this );
}
- bool operator==( PipelineDepthStencilStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ operator VkPipelineExecutableInfoKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPipelineExecutableInfoKHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PipelineExecutableInfoKHR const& ) const = default;
+#else
+ bool operator==( PipelineExecutableInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( flags == rhs.flags )
- && ( depthTestEnable == rhs.depthTestEnable )
- && ( depthWriteEnable == rhs.depthWriteEnable )
- && ( depthCompareOp == rhs.depthCompareOp )
- && ( depthBoundsTestEnable == rhs.depthBoundsTestEnable )
- && ( stencilTestEnable == rhs.stencilTestEnable )
- && ( front == rhs.front )
- && ( back == rhs.back )
- && ( minDepthBounds == rhs.minDepthBounds )
- && ( maxDepthBounds == rhs.maxDepthBounds );
+ && ( pipeline == rhs.pipeline )
+ && ( executableIndex == rhs.executableIndex );
}
- bool operator!=( PipelineDepthStencilStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PipelineExecutableInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDepthStencilStateCreateInfo;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableInfoKHR;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags = {};
- VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable = {};
- VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable = {};
- VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever;
- VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable = {};
- VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable = {};
- VULKAN_HPP_NAMESPACE::StencilOpState front = {};
- VULKAN_HPP_NAMESPACE::StencilOpState back = {};
- float minDepthBounds = {};
- float maxDepthBounds = {};
+ VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
+ uint32_t executableIndex = {};
+
};
- static_assert( sizeof( PipelineDepthStencilStateCreateInfo ) == sizeof( VkPipelineDepthStencilStateCreateInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PipelineDepthStencilStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( PipelineExecutableInfoKHR ) == sizeof( VkPipelineExecutableInfoKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PipelineExecutableInfoKHR>::value, "struct wrapper is not a standard layout!" );
- struct PipelineColorBlendAttachmentState
+ template <>
+ struct CppType<StructureType, StructureType::ePipelineExecutableInfoKHR>
{
- VULKAN_HPP_CONSTEXPR PipelineColorBlendAttachmentState( VULKAN_HPP_NAMESPACE::Bool32 blendEnable_ = {},
- VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero,
- VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero,
- VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd,
- VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero,
- VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero,
- VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd,
- VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_ = {} ) VULKAN_HPP_NOEXCEPT
- : blendEnable( blendEnable_ )
- , srcColorBlendFactor( srcColorBlendFactor_ )
- , dstColorBlendFactor( dstColorBlendFactor_ )
- , colorBlendOp( colorBlendOp_ )
- , srcAlphaBlendFactor( srcAlphaBlendFactor_ )
- , dstAlphaBlendFactor( dstAlphaBlendFactor_ )
- , alphaBlendOp( alphaBlendOp_ )
- , colorWriteMask( colorWriteMask_ )
+ using Type = PipelineExecutableInfoKHR;
+ };
+
+ struct PipelineExecutableInternalRepresentationKHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutableInternalRepresentationKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInternalRepresentationKHR(std::array<char,VK_MAX_DESCRIPTION_SIZE> const& name_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const& description_ = {}, VULKAN_HPP_NAMESPACE::Bool32 isText_ = {}, size_t dataSize_ = {}, void* pData_ = {}) VULKAN_HPP_NOEXCEPT
+ : name( name_ ), description( description_ ), isText( isText_ ), dataSize( dataSize_ ), pData( pData_ )
{}
- PipelineColorBlendAttachmentState( VkPipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInternalRepresentationKHR( PipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PipelineExecutableInternalRepresentationKHR( VkPipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- PipelineColorBlendAttachmentState& operator=( VkPipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ template <typename T>
+ PipelineExecutableInternalRepresentationKHR( std::array<char,VK_MAX_DESCRIPTION_SIZE> const& name_, std::array<char,VK_MAX_DESCRIPTION_SIZE> const& description_, VULKAN_HPP_NAMESPACE::Bool32 isText_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<T> const & data_ )
+ : name( name_ ), description( description_ ), isText( isText_ ), dataSize( data_.size() * sizeof(T) ), pData( data_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PipelineExecutableInternalRepresentationKHR & operator=( VkPipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR const *>( &rhs );
return *this;
}
- PipelineColorBlendAttachmentState & setBlendEnable( VULKAN_HPP_NAMESPACE::Bool32 blendEnable_ ) VULKAN_HPP_NOEXCEPT
+ PipelineExecutableInternalRepresentationKHR & operator=( PipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- blendEnable = blendEnable_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineExecutableInternalRepresentationKHR ) );
return *this;
}
- PipelineColorBlendAttachmentState & setSrcColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT
+
+ operator VkPipelineExecutableInternalRepresentationKHR const&() const VULKAN_HPP_NOEXCEPT
{
- srcColorBlendFactor = srcColorBlendFactor_;
- return *this;
+ return *reinterpret_cast<const VkPipelineExecutableInternalRepresentationKHR*>( this );
}
- PipelineColorBlendAttachmentState & setDstColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT
+ operator VkPipelineExecutableInternalRepresentationKHR &() VULKAN_HPP_NOEXCEPT
{
- dstColorBlendFactor = dstColorBlendFactor_;
+ return *reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PipelineExecutableInternalRepresentationKHR const& ) const = default;
+#else
+ bool operator==( PipelineExecutableInternalRepresentationKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( name == rhs.name )
+ && ( description == rhs.description )
+ && ( isText == rhs.isText )
+ && ( dataSize == rhs.dataSize )
+ && ( pData == rhs.pData );
+ }
+
+ bool operator!=( PipelineExecutableInternalRepresentationKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableInternalRepresentationKHR;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> name = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
+ VULKAN_HPP_NAMESPACE::Bool32 isText = {};
+ size_t dataSize = {};
+ void* pData = {};
+
+ };
+ static_assert( sizeof( PipelineExecutableInternalRepresentationKHR ) == sizeof( VkPipelineExecutableInternalRepresentationKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PipelineExecutableInternalRepresentationKHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::ePipelineExecutableInternalRepresentationKHR>
+ {
+ using Type = PipelineExecutableInternalRepresentationKHR;
+ };
+
+ struct PipelineInfoKHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PipelineInfoKHR(VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}) VULKAN_HPP_NOEXCEPT
+ : pipeline( pipeline_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR PipelineInfoKHR( PipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PipelineInfoKHR( VkPipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PipelineInfoKHR & operator=( VkPipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineInfoKHR const *>( &rhs );
return *this;
}
- PipelineColorBlendAttachmentState & setColorBlendOp( VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ ) VULKAN_HPP_NOEXCEPT
+ PipelineInfoKHR & operator=( PipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- colorBlendOp = colorBlendOp_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineInfoKHR ) );
return *this;
}
- PipelineColorBlendAttachmentState & setSrcAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT
+ PipelineInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- srcAlphaBlendFactor = srcAlphaBlendFactor_;
+ pNext = pNext_;
return *this;
}
- PipelineColorBlendAttachmentState & setDstAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT
+ PipelineInfoKHR & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
{
- dstAlphaBlendFactor = dstAlphaBlendFactor_;
+ pipeline = pipeline_;
return *this;
}
- PipelineColorBlendAttachmentState & setAlphaBlendOp( VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ ) VULKAN_HPP_NOEXCEPT
+
+ operator VkPipelineInfoKHR const&() const VULKAN_HPP_NOEXCEPT
{
- alphaBlendOp = alphaBlendOp_;
+ return *reinterpret_cast<const VkPipelineInfoKHR*>( this );
+ }
+
+ operator VkPipelineInfoKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPipelineInfoKHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PipelineInfoKHR const& ) const = default;
+#else
+ bool operator==( PipelineInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( pipeline == rhs.pipeline );
+ }
+
+ bool operator!=( PipelineInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
+
+ };
+ static_assert( sizeof( PipelineInfoKHR ) == sizeof( VkPipelineInfoKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PipelineInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::ePipelineInfoKHR>
+ {
+ using Type = PipelineInfoKHR;
+ };
+
+ struct PipelineExecutablePropertiesKHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutablePropertiesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 PipelineExecutablePropertiesKHR(VULKAN_HPP_NAMESPACE::ShaderStageFlags stages_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const& name_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const& description_ = {}, uint32_t subgroupSize_ = {}) VULKAN_HPP_NOEXCEPT
+ : stages( stages_ ), name( name_ ), description( description_ ), subgroupSize( subgroupSize_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR_14 PipelineExecutablePropertiesKHR( PipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PipelineExecutablePropertiesKHR( VkPipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PipelineExecutablePropertiesKHR & operator=( VkPipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR const *>( &rhs );
return *this;
}
- PipelineColorBlendAttachmentState & setColorWriteMask( VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_ ) VULKAN_HPP_NOEXCEPT
+ PipelineExecutablePropertiesKHR & operator=( PipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- colorWriteMask = colorWriteMask_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineExecutablePropertiesKHR ) );
return *this;
}
- operator VkPipelineColorBlendAttachmentState const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkPipelineExecutablePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPipelineColorBlendAttachmentState*>( this );
+ return *reinterpret_cast<const VkPipelineExecutablePropertiesKHR*>( this );
}
- operator VkPipelineColorBlendAttachmentState &() VULKAN_HPP_NOEXCEPT
+ operator VkPipelineExecutablePropertiesKHR &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPipelineColorBlendAttachmentState*>( this );
+ return *reinterpret_cast<VkPipelineExecutablePropertiesKHR*>( this );
}
- bool operator==( PipelineColorBlendAttachmentState const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PipelineExecutablePropertiesKHR const& ) const = default;
+#else
+ bool operator==( PipelineExecutablePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- return ( blendEnable == rhs.blendEnable )
- && ( srcColorBlendFactor == rhs.srcColorBlendFactor )
- && ( dstColorBlendFactor == rhs.dstColorBlendFactor )
- && ( colorBlendOp == rhs.colorBlendOp )
- && ( srcAlphaBlendFactor == rhs.srcAlphaBlendFactor )
- && ( dstAlphaBlendFactor == rhs.dstAlphaBlendFactor )
- && ( alphaBlendOp == rhs.alphaBlendOp )
- && ( colorWriteMask == rhs.colorWriteMask );
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( stages == rhs.stages )
+ && ( name == rhs.name )
+ && ( description == rhs.description )
+ && ( subgroupSize == rhs.subgroupSize );
}
- bool operator!=( PipelineColorBlendAttachmentState const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PipelineExecutablePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- VULKAN_HPP_NAMESPACE::Bool32 blendEnable = {};
- VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
- VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
- VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd;
- VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
- VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
- VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd;
- VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask = {};
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutablePropertiesKHR;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ShaderStageFlags stages = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> name = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
+ uint32_t subgroupSize = {};
+
};
- static_assert( sizeof( PipelineColorBlendAttachmentState ) == sizeof( VkPipelineColorBlendAttachmentState ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PipelineColorBlendAttachmentState>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( PipelineExecutablePropertiesKHR ) == sizeof( VkPipelineExecutablePropertiesKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PipelineExecutablePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
- struct PipelineColorBlendStateCreateInfo
+ template <>
+ struct CppType<StructureType, StructureType::ePipelineExecutablePropertiesKHR>
+ {
+ using Type = PipelineExecutablePropertiesKHR;
+ };
+
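A minimal usage sketch for the PipelineInfoKHR / PipelineExecutablePropertiesKHR wrappers above, assuming a vk::Device `device` and a vk::Pipeline `pipeline` created on a device with VK_KHR_pipeline_executable_properties enabled, that <vulkan/vulkan.hpp> and <cstdio> are included, and that the corresponding vk::Device::getPipelineExecutablePropertiesKHR member is available in this header:

    // Query which executables (per-stage compilations) back a pipeline.
    vk::PipelineInfoKHR pipelineInfo;
    pipelineInfo.setPipeline( pipeline );
    std::vector<vk::PipelineExecutablePropertiesKHR> props =
        device.getPipelineExecutablePropertiesKHR( pipelineInfo );
    for ( vk::PipelineExecutablePropertiesKHR const & p : props )
    {
      // name/description are VK_MAX_DESCRIPTION_SIZE char arrays wrapped by ArrayWrapper1D.
      printf( "%s (subgroup size %u)\n", p.name.data(), p.subgroupSize );
    }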
+ union PipelineExecutableStatisticValueKHR
{
- VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_ = {},
- VULKAN_HPP_NAMESPACE::LogicOp logicOp_ = VULKAN_HPP_NAMESPACE::LogicOp::eClear,
- uint32_t attachmentCount_ = {},
- const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState* pAttachments_ = {},
- std::array<float,4> const& blendConstants_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , logicOpEnable( logicOpEnable_ )
- , logicOp( logicOp_ )
- , attachmentCount( attachmentCount_ )
- , pAttachments( pAttachments_ )
- , blendConstants{}
+ PipelineExecutableStatisticValueKHR( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR const& rhs ) VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<float,4,4>::copy( blendConstants, blendConstants_ );
+ memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR ) );
}
- VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineExecutableStatisticValueKHR( VULKAN_HPP_NAMESPACE::Bool32 b32_ = {} )
+ : b32( b32_ )
+ {}
+
+ PipelineExecutableStatisticValueKHR( int64_t i64_ )
+ : i64( i64_ )
+ {}
+
+ PipelineExecutableStatisticValueKHR( uint64_t u64_ )
+ : u64( u64_ )
+ {}
+
+ PipelineExecutableStatisticValueKHR( double f64_ )
+ : f64( f64_ )
+ {}
+
+ PipelineExecutableStatisticValueKHR & setB32( VULKAN_HPP_NAMESPACE::Bool32 b32_ ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo ) - offsetof( PipelineColorBlendStateCreateInfo, pNext ) );
+ b32 = b32_;
return *this;
}
- PipelineColorBlendStateCreateInfo( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineExecutableStatisticValueKHR & setI64( int64_t i64_ ) VULKAN_HPP_NOEXCEPT
+ {
+ i64 = i64_;
+ return *this;
+ }
+
+ PipelineExecutableStatisticValueKHR & setU64( uint64_t u64_ ) VULKAN_HPP_NOEXCEPT
+ {
+ u64 = u64_;
+ return *this;
+ }
+
+ PipelineExecutableStatisticValueKHR & setF64( double f64_ ) VULKAN_HPP_NOEXCEPT
+ {
+ f64 = f64_;
+ return *this;
+ }
+
+ VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR & operator=( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR ) );
+ return *this;
+ }
+
+ operator VkPipelineExecutableStatisticValueKHR const&() const
+ {
+ return *reinterpret_cast<const VkPipelineExecutableStatisticValueKHR*>(this);
+ }
+
+ operator VkPipelineExecutableStatisticValueKHR &()
+ {
+ return *reinterpret_cast<VkPipelineExecutableStatisticValueKHR*>(this);
+ }
+
+#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+ VULKAN_HPP_NAMESPACE::Bool32 b32;
+ int64_t i64;
+ uint64_t u64;
+ double f64;
+#else
+ VkBool32 b32;
+ int64_t i64;
+ uint64_t u64;
+ double f64;
+#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
+ };
+
+ struct PipelineExecutableStatisticKHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutableStatisticKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ PipelineExecutableStatisticKHR(std::array<char,VK_MAX_DESCRIPTION_SIZE> const& name_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const& description_ = {}, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format_ = VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value_ = {}) VULKAN_HPP_NOEXCEPT
+ : name( name_ ), description( description_ ), format( format_ ), value( value_ )
+ {}
+
+ PipelineExecutableStatisticKHR( PipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PipelineExecutableStatisticKHR( VkPipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PipelineColorBlendStateCreateInfo& operator=( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineExecutableStatisticKHR & operator=( VkPipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR const *>( &rhs );
return *this;
}
- PipelineColorBlendStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ PipelineExecutableStatisticKHR & operator=( PipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineExecutableStatisticKHR ) );
return *this;
}
- PipelineColorBlendStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+
+ operator VkPipelineExecutableStatisticKHR const&() const VULKAN_HPP_NOEXCEPT
{
- flags = flags_;
+ return *reinterpret_cast<const VkPipelineExecutableStatisticKHR*>( this );
+ }
+
+ operator VkPipelineExecutableStatisticKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPipelineExecutableStatisticKHR*>( this );
+ }
+
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableStatisticKHR;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> name = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
+ VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format = VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32;
+ VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value = {};
+
+ };
+ static_assert( sizeof( PipelineExecutableStatisticKHR ) == sizeof( VkPipelineExecutableStatisticKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PipelineExecutableStatisticKHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::ePipelineExecutableStatisticKHR>
+ {
+ using Type = PipelineExecutableStatisticKHR;
+ };
+
+ struct RefreshCycleDurationGOOGLE
+ {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR RefreshCycleDurationGOOGLE(uint64_t refreshDuration_ = {}) VULKAN_HPP_NOEXCEPT
+ : refreshDuration( refreshDuration_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR RefreshCycleDurationGOOGLE( RefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ RefreshCycleDurationGOOGLE( VkRefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ RefreshCycleDurationGOOGLE & operator=( VkRefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE const *>( &rhs );
return *this;
}
- PipelineColorBlendStateCreateInfo & setLogicOpEnable( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_ ) VULKAN_HPP_NOEXCEPT
+ RefreshCycleDurationGOOGLE & operator=( RefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
{
- logicOpEnable = logicOpEnable_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( RefreshCycleDurationGOOGLE ) );
return *this;
}
- PipelineColorBlendStateCreateInfo & setLogicOp( VULKAN_HPP_NAMESPACE::LogicOp logicOp_ ) VULKAN_HPP_NOEXCEPT
+
+ operator VkRefreshCycleDurationGOOGLE const&() const VULKAN_HPP_NOEXCEPT
{
- logicOp = logicOp_;
+ return *reinterpret_cast<const VkRefreshCycleDurationGOOGLE*>( this );
+ }
+
+ operator VkRefreshCycleDurationGOOGLE &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkRefreshCycleDurationGOOGLE*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( RefreshCycleDurationGOOGLE const& ) const = default;
+#else
+ bool operator==( RefreshCycleDurationGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( refreshDuration == rhs.refreshDuration );
+ }
+
+ bool operator!=( RefreshCycleDurationGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ uint64_t refreshDuration = {};
+
+ };
+ static_assert( sizeof( RefreshCycleDurationGOOGLE ) == sizeof( VkRefreshCycleDurationGOOGLE ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<RefreshCycleDurationGOOGLE>::value, "struct wrapper is not a standard layout!" );
+
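RefreshCycleDurationGOOGLE is the output block of vkGetRefreshCycleDurationGOOGLE (VK_GOOGLE_display_timing); refreshDuration is the display refresh period in nanoseconds. A minimal sketch, assuming a vk::Device `device` and a vk::SwapchainKHR `swapchain` created with the extension enabled:

    vk::RefreshCycleDurationGOOGLE cycle = device.getRefreshCycleDurationGOOGLE( swapchain );
    double refreshRateHz = 1.0e9 / static_cast<double>( cycle.refreshDuration );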
+ struct SemaphoreGetFdInfoKHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreGetFdInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SemaphoreGetFdInfoKHR(VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT
+ : semaphore( semaphore_ ), handleType( handleType_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR SemaphoreGetFdInfoKHR( SemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ SemaphoreGetFdInfoKHR( VkSemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ SemaphoreGetFdInfoKHR & operator=( VkSemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR const *>( &rhs );
return *this;
}
- PipelineColorBlendStateCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
+ SemaphoreGetFdInfoKHR & operator=( SemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- attachmentCount = attachmentCount_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( SemaphoreGetFdInfoKHR ) );
return *this;
}
- PipelineColorBlendStateCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState* pAttachments_ ) VULKAN_HPP_NOEXCEPT
+ SemaphoreGetFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- pAttachments = pAttachments_;
+ pNext = pNext_;
return *this;
}
- PipelineColorBlendStateCreateInfo & setBlendConstants( std::array<float,4> blendConstants_ ) VULKAN_HPP_NOEXCEPT
+ SemaphoreGetFdInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
{
- memcpy( blendConstants, blendConstants_.data(), 4 * sizeof( float ) );
+ semaphore = semaphore_;
return *this;
}
- operator VkPipelineColorBlendStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+ SemaphoreGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPipelineColorBlendStateCreateInfo*>( this );
+ handleType = handleType_;
+ return *this;
}
- operator VkPipelineColorBlendStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+
+ operator VkSemaphoreGetFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPipelineColorBlendStateCreateInfo*>( this );
+ return *reinterpret_cast<const VkSemaphoreGetFdInfoKHR*>( this );
}
- bool operator==( PipelineColorBlendStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ operator VkSemaphoreGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkSemaphoreGetFdInfoKHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SemaphoreGetFdInfoKHR const& ) const = default;
+#else
+ bool operator==( SemaphoreGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( flags == rhs.flags )
- && ( logicOpEnable == rhs.logicOpEnable )
- && ( logicOp == rhs.logicOp )
- && ( attachmentCount == rhs.attachmentCount )
- && ( pAttachments == rhs.pAttachments )
- && ( memcmp( blendConstants, rhs.blendConstants, 4 * sizeof( float ) ) == 0 );
+ && ( semaphore == rhs.semaphore )
+ && ( handleType == rhs.handleType );
}
- bool operator!=( PipelineColorBlendStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( SemaphoreGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorBlendStateCreateInfo;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetFdInfoKHR;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags = {};
- VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable = {};
- VULKAN_HPP_NAMESPACE::LogicOp logicOp = VULKAN_HPP_NAMESPACE::LogicOp::eClear;
- uint32_t attachmentCount = {};
- const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState* pAttachments = {};
- float blendConstants[4] = {};
+ VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
+
};
- static_assert( sizeof( PipelineColorBlendStateCreateInfo ) == sizeof( VkPipelineColorBlendStateCreateInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PipelineColorBlendStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( SemaphoreGetFdInfoKHR ) == sizeof( VkSemaphoreGetFdInfoKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<SemaphoreGetFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
- struct PipelineDynamicStateCreateInfo
+ template <>
+ struct CppType<StructureType, StructureType::eSemaphoreGetFdInfoKHR>
+ {
+ using Type = SemaphoreGetFdInfoKHR;
+ };
+
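SemaphoreGetFdInfoKHR is the parameter block for vkGetSemaphoreFdKHR (VK_KHR_external_semaphore_fd). A minimal sketch, assuming a vk::Device `device` and a vk::Semaphore `semaphore` created with an exportable opaque-fd handle type:

    vk::SemaphoreGetFdInfoKHR getFdInfo( semaphore, vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd );
    int fd = device.getSemaphoreFdKHR( getFdInfo );  // caller owns the returned fd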
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ struct SemaphoreGetWin32HandleInfoKHR
{
- VULKAN_HPP_CONSTEXPR PipelineDynamicStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_ = {},
- uint32_t dynamicStateCount_ = {},
- const VULKAN_HPP_NAMESPACE::DynamicState* pDynamicStates_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , dynamicStateCount( dynamicStateCount_ )
- , pDynamicStates( pDynamicStates_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreGetWin32HandleInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SemaphoreGetWin32HandleInfoKHR(VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT
+ : semaphore( semaphore_ ), handleType( handleType_ )
{}
- VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR SemaphoreGetWin32HandleInfoKHR( SemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ SemaphoreGetWin32HandleInfoKHR( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ SemaphoreGetWin32HandleInfoKHR & operator=( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo ) - offsetof( PipelineDynamicStateCreateInfo, pNext ) );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR const *>( &rhs );
return *this;
}
- PipelineDynamicStateCreateInfo( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ SemaphoreGetWin32HandleInfoKHR & operator=( SemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( SemaphoreGetWin32HandleInfoKHR ) );
+ return *this;
+ }
+
+ SemaphoreGetWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ SemaphoreGetWin32HandleInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
+ {
+ semaphore = semaphore_;
+ return *this;
+ }
+
+ SemaphoreGetWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ {
+ handleType = handleType_;
+ return *this;
+ }
+
+
+ operator VkSemaphoreGetWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR*>( this );
+ }
+
+ operator VkSemaphoreGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkSemaphoreGetWin32HandleInfoKHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SemaphoreGetWin32HandleInfoKHR const& ) const = default;
+#else
+ bool operator==( SemaphoreGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( semaphore == rhs.semaphore )
+ && ( handleType == rhs.handleType );
+ }
+
+ bool operator!=( SemaphoreGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetWin32HandleInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
+
+ };
+ static_assert( sizeof( SemaphoreGetWin32HandleInfoKHR ) == sizeof( VkSemaphoreGetWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<SemaphoreGetWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eSemaphoreGetWin32HandleInfoKHR>
+ {
+ using Type = SemaphoreGetWin32HandleInfoKHR;
+ };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+ struct ImportFenceFdInfoKHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportFenceFdInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR(VULKAN_HPP_NAMESPACE::Fence fence_ = {}, VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, int fd_ = {}) VULKAN_HPP_NOEXCEPT
+ : fence( fence_ ), flags( flags_ ), handleType( handleType_ ), fd( fd_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR( ImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImportFenceFdInfoKHR( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PipelineDynamicStateCreateInfo& operator=( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ ImportFenceFdInfoKHR & operator=( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR const *>( &rhs );
return *this;
}
- PipelineDynamicStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ ImportFenceFdInfoKHR & operator=( ImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ImportFenceFdInfoKHR ) );
+ return *this;
+ }
+
+ ImportFenceFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- PipelineDynamicStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ ImportFenceFdInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
+ {
+ fence = fence_;
+ return *this;
+ }
+
+ ImportFenceFdInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- PipelineDynamicStateCreateInfo & setDynamicStateCount( uint32_t dynamicStateCount_ ) VULKAN_HPP_NOEXCEPT
+ ImportFenceFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
- dynamicStateCount = dynamicStateCount_;
+ handleType = handleType_;
return *this;
}
- PipelineDynamicStateCreateInfo & setPDynamicStates( const VULKAN_HPP_NAMESPACE::DynamicState* pDynamicStates_ ) VULKAN_HPP_NOEXCEPT
+ ImportFenceFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT
{
- pDynamicStates = pDynamicStates_;
+ fd = fd_;
return *this;
}
- operator VkPipelineDynamicStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkImportFenceFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPipelineDynamicStateCreateInfo*>( this );
+ return *reinterpret_cast<const VkImportFenceFdInfoKHR*>( this );
}
- operator VkPipelineDynamicStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkImportFenceFdInfoKHR &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPipelineDynamicStateCreateInfo*>( this );
+ return *reinterpret_cast<VkImportFenceFdInfoKHR*>( this );
}
- bool operator==( PipelineDynamicStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ImportFenceFdInfoKHR const& ) const = default;
+#else
+ bool operator==( ImportFenceFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
+ && ( fence == rhs.fence )
&& ( flags == rhs.flags )
- && ( dynamicStateCount == rhs.dynamicStateCount )
- && ( pDynamicStates == rhs.pDynamicStates );
+ && ( handleType == rhs.handleType )
+ && ( fd == rhs.fd );
}
- bool operator!=( PipelineDynamicStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( ImportFenceFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDynamicStateCreateInfo;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportFenceFdInfoKHR;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags = {};
- uint32_t dynamicStateCount = {};
- const VULKAN_HPP_NAMESPACE::DynamicState* pDynamicStates = {};
+ VULKAN_HPP_NAMESPACE::Fence fence = {};
+ VULKAN_HPP_NAMESPACE::FenceImportFlags flags = {};
+ VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
+ int fd = {};
+
};
- static_assert( sizeof( PipelineDynamicStateCreateInfo ) == sizeof( VkPipelineDynamicStateCreateInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PipelineDynamicStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( ImportFenceFdInfoKHR ) == sizeof( VkImportFenceFdInfoKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ImportFenceFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
- struct GraphicsPipelineCreateInfo
+ template <>
+ struct CppType<StructureType, StructureType::eImportFenceFdInfoKHR>
+ {
+ using Type = ImportFenceFdInfoKHR;
+ };
+
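ImportFenceFdInfoKHR is the parameter block for vkImportFenceFdKHR (VK_KHR_external_fence_fd). A minimal sketch, assuming a vk::Device `device`, a vk::Fence `fence`, and a sync fd `fd` obtained from a compatible export (sync-fd handles must be imported temporarily):

    vk::ImportFenceFdInfoKHR importInfo( fence, vk::FenceImportFlagBits::eTemporary,
                                         vk::ExternalFenceHandleTypeFlagBits::eSyncFd, fd );
    device.importFenceFdKHR( importInfo );  // the fd is consumed on success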
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ struct ImportFenceWin32HandleInfoKHR
{
- VULKAN_HPP_CONSTEXPR GraphicsPipelineCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {},
- uint32_t stageCount_ = {},
- const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ = {},
- const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState_ = {},
- const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo* pInputAssemblyState_ = {},
- const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState_ = {},
- const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo* pViewportState_ = {},
- const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo* pRasterizationState_ = {},
- const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo* pMultisampleState_ = {},
- const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo* pDepthStencilState_ = {},
- const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo* pColorBlendState_ = {},
- const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo* pDynamicState_ = {},
- VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {},
- VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {},
- uint32_t subpass_ = {},
- VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {},
- int32_t basePipelineIndex_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , stageCount( stageCount_ )
- , pStages( pStages_ )
- , pVertexInputState( pVertexInputState_ )
- , pInputAssemblyState( pInputAssemblyState_ )
- , pTessellationState( pTessellationState_ )
- , pViewportState( pViewportState_ )
- , pRasterizationState( pRasterizationState_ )
- , pMultisampleState( pMultisampleState_ )
- , pDepthStencilState( pDepthStencilState_ )
- , pColorBlendState( pColorBlendState_ )
- , pDynamicState( pDynamicState_ )
- , layout( layout_ )
- , renderPass( renderPass_ )
- , subpass( subpass_ )
- , basePipelineHandle( basePipelineHandle_ )
- , basePipelineIndex( basePipelineIndex_ )
- {}
-
- VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & operator=( VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo ) - offsetof( GraphicsPipelineCreateInfo, pNext ) );
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportFenceWin32HandleInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ImportFenceWin32HandleInfoKHR(VULKAN_HPP_NAMESPACE::Fence fence_ = {}, VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, HANDLE handle_ = {}, LPCWSTR name_ = {}) VULKAN_HPP_NOEXCEPT
+ : fence( fence_ ), flags( flags_ ), handleType( handleType_ ), handle( handle_ ), name( name_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR ImportFenceWin32HandleInfoKHR( ImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImportFenceWin32HandleInfoKHR( VkImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ ImportFenceWin32HandleInfoKHR & operator=( VkImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR const *>( &rhs );
return *this;
}
- GraphicsPipelineCreateInfo( VkGraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ ImportFenceWin32HandleInfoKHR & operator=( ImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ImportFenceWin32HandleInfoKHR ) );
+ return *this;
+ }
+
+ ImportFenceWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ ImportFenceWin32HandleInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
+ {
+ fence = fence_;
+ return *this;
+ }
+
+ ImportFenceWin32HandleInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ ImportFenceWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ {
+ handleType = handleType_;
+ return *this;
+ }
+
+ ImportFenceWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT
+ {
+ handle = handle_;
+ return *this;
+ }
+
+ ImportFenceWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
+ {
+ name = name_;
+ return *this;
+ }
+
+
+ operator VkImportFenceWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkImportFenceWin32HandleInfoKHR*>( this );
+ }
+
+ operator VkImportFenceWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkImportFenceWin32HandleInfoKHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ImportFenceWin32HandleInfoKHR const& ) const = default;
+#else
+ bool operator==( ImportFenceWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( fence == rhs.fence )
+ && ( flags == rhs.flags )
+ && ( handleType == rhs.handleType )
+ && ( handle == rhs.handle )
+ && ( name == rhs.name );
+ }
+
+ bool operator!=( ImportFenceWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportFenceWin32HandleInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Fence fence = {};
+ VULKAN_HPP_NAMESPACE::FenceImportFlags flags = {};
+ VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
+ HANDLE handle = {};
+ LPCWSTR name = {};
+
+ };
+ static_assert( sizeof( ImportFenceWin32HandleInfoKHR ) == sizeof( VkImportFenceWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ImportFenceWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eImportFenceWin32HandleInfoKHR>
+ {
+ using Type = ImportFenceWin32HandleInfoKHR;
+ };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+ struct ImportSemaphoreFdInfoKHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportSemaphoreFdInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ImportSemaphoreFdInfoKHR(VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, int fd_ = {}) VULKAN_HPP_NOEXCEPT
+ : semaphore( semaphore_ ), flags( flags_ ), handleType( handleType_ ), fd( fd_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR ImportSemaphoreFdInfoKHR( ImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImportSemaphoreFdInfoKHR( VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- GraphicsPipelineCreateInfo& operator=( VkGraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ ImportSemaphoreFdInfoKHR & operator=( VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR const *>( &rhs );
return *this;
}
- GraphicsPipelineCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ ImportSemaphoreFdInfoKHR & operator=( ImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ImportSemaphoreFdInfoKHR ) );
+ return *this;
+ }
+
+ ImportSemaphoreFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- GraphicsPipelineCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ ImportSemaphoreFdInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
+ {
+ semaphore = semaphore_;
+ return *this;
+ }
+
+ ImportSemaphoreFdInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- GraphicsPipelineCreateInfo & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT
+ ImportSemaphoreFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
- stageCount = stageCount_;
+ handleType = handleType_;
return *this;
}
- GraphicsPipelineCreateInfo & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ ) VULKAN_HPP_NOEXCEPT
+ ImportSemaphoreFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT
{
- pStages = pStages_;
+ fd = fd_;
return *this;
}
- GraphicsPipelineCreateInfo & setPVertexInputState( const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState_ ) VULKAN_HPP_NOEXCEPT
+
+ operator VkImportSemaphoreFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT
{
- pVertexInputState = pVertexInputState_;
+ return *reinterpret_cast<const VkImportSemaphoreFdInfoKHR*>( this );
+ }
+
+ operator VkImportSemaphoreFdInfoKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkImportSemaphoreFdInfoKHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ImportSemaphoreFdInfoKHR const& ) const = default;
+#else
+ bool operator==( ImportSemaphoreFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( semaphore == rhs.semaphore )
+ && ( flags == rhs.flags )
+ && ( handleType == rhs.handleType )
+ && ( fd == rhs.fd );
+ }
+
+ bool operator!=( ImportSemaphoreFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreFdInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
+ VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {};
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
+ int fd = {};
+
+ };
+ static_assert( sizeof( ImportSemaphoreFdInfoKHR ) == sizeof( VkImportSemaphoreFdInfoKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ImportSemaphoreFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eImportSemaphoreFdInfoKHR>
+ {
+ using Type = ImportSemaphoreFdInfoKHR;
+ };
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ struct ImportSemaphoreWin32HandleInfoKHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportSemaphoreWin32HandleInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ImportSemaphoreWin32HandleInfoKHR(VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, HANDLE handle_ = {}, LPCWSTR name_ = {}) VULKAN_HPP_NOEXCEPT
+ : semaphore( semaphore_ ), flags( flags_ ), handleType( handleType_ ), handle( handle_ ), name( name_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR ImportSemaphoreWin32HandleInfoKHR( ImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImportSemaphoreWin32HandleInfoKHR( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ ImportSemaphoreWin32HandleInfoKHR & operator=( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR const *>( &rhs );
return *this;
}
- GraphicsPipelineCreateInfo & setPInputAssemblyState( const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo* pInputAssemblyState_ ) VULKAN_HPP_NOEXCEPT
+ ImportSemaphoreWin32HandleInfoKHR & operator=( ImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- pInputAssemblyState = pInputAssemblyState_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ImportSemaphoreWin32HandleInfoKHR ) );
return *this;
}
- GraphicsPipelineCreateInfo & setPTessellationState( const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState_ ) VULKAN_HPP_NOEXCEPT
+ ImportSemaphoreWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- pTessellationState = pTessellationState_;
+ pNext = pNext_;
return *this;
}
- GraphicsPipelineCreateInfo & setPViewportState( const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo* pViewportState_ ) VULKAN_HPP_NOEXCEPT
+ ImportSemaphoreWin32HandleInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
{
- pViewportState = pViewportState_;
+ semaphore = semaphore_;
return *this;
}
- GraphicsPipelineCreateInfo & setPRasterizationState( const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo* pRasterizationState_ ) VULKAN_HPP_NOEXCEPT
+ ImportSemaphoreWin32HandleInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
- pRasterizationState = pRasterizationState_;
+ flags = flags_;
return *this;
}
- GraphicsPipelineCreateInfo & setPMultisampleState( const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo* pMultisampleState_ ) VULKAN_HPP_NOEXCEPT
+ ImportSemaphoreWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
- pMultisampleState = pMultisampleState_;
+ handleType = handleType_;
return *this;
}
- GraphicsPipelineCreateInfo & setPDepthStencilState( const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo* pDepthStencilState_ ) VULKAN_HPP_NOEXCEPT
+ ImportSemaphoreWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT
{
- pDepthStencilState = pDepthStencilState_;
+ handle = handle_;
return *this;
}
- GraphicsPipelineCreateInfo & setPColorBlendState( const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo* pColorBlendState_ ) VULKAN_HPP_NOEXCEPT
+ ImportSemaphoreWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
{
- pColorBlendState = pColorBlendState_;
+ name = name_;
return *this;
}
- GraphicsPipelineCreateInfo & setPDynamicState( const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo* pDynamicState_ ) VULKAN_HPP_NOEXCEPT
+
+ operator VkImportSemaphoreWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT
{
- pDynamicState = pDynamicState_;
+ return *reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR*>( this );
+ }
+
+ operator VkImportSemaphoreWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkImportSemaphoreWin32HandleInfoKHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ImportSemaphoreWin32HandleInfoKHR const& ) const = default;
+#else
+ bool operator==( ImportSemaphoreWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( semaphore == rhs.semaphore )
+ && ( flags == rhs.flags )
+ && ( handleType == rhs.handleType )
+ && ( handle == rhs.handle )
+ && ( name == rhs.name );
+ }
+
+ bool operator!=( ImportSemaphoreWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreWin32HandleInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
+ VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {};
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
+ HANDLE handle = {};
+ LPCWSTR name = {};
+
+ };
+ static_assert( sizeof( ImportSemaphoreWin32HandleInfoKHR ) == sizeof( VkImportSemaphoreWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ImportSemaphoreWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eImportSemaphoreWin32HandleInfoKHR>
+ {
+ using Type = ImportSemaphoreWin32HandleInfoKHR;
+ };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+ struct InitializePerformanceApiInfoINTEL
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eInitializePerformanceApiInfoINTEL;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR InitializePerformanceApiInfoINTEL(void* pUserData_ = {}) VULKAN_HPP_NOEXCEPT
+ : pUserData( pUserData_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR InitializePerformanceApiInfoINTEL( InitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ InitializePerformanceApiInfoINTEL( VkInitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ InitializePerformanceApiInfoINTEL & operator=( VkInitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL const *>( &rhs );
return *this;
}
- GraphicsPipelineCreateInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
+ InitializePerformanceApiInfoINTEL & operator=( InitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout = layout_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( InitializePerformanceApiInfoINTEL ) );
return *this;
}
- GraphicsPipelineCreateInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
+ InitializePerformanceApiInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- renderPass = renderPass_;
+ pNext = pNext_;
return *this;
}
- GraphicsPipelineCreateInfo & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT
+ InitializePerformanceApiInfoINTEL & setPUserData( void* pUserData_ ) VULKAN_HPP_NOEXCEPT
{
- subpass = subpass_;
+ pUserData = pUserData_;
return *this;
}
- GraphicsPipelineCreateInfo & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
+
+ operator VkInitializePerformanceApiInfoINTEL const&() const VULKAN_HPP_NOEXCEPT
{
- basePipelineHandle = basePipelineHandle_;
+ return *reinterpret_cast<const VkInitializePerformanceApiInfoINTEL*>( this );
+ }
+
+ operator VkInitializePerformanceApiInfoINTEL &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkInitializePerformanceApiInfoINTEL*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( InitializePerformanceApiInfoINTEL const& ) const = default;
+#else
+ bool operator==( InitializePerformanceApiInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( pUserData == rhs.pUserData );
+ }
+
+ bool operator!=( InitializePerformanceApiInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eInitializePerformanceApiInfoINTEL;
+ const void* pNext = {};
+ void* pUserData = {};
+
+ };
+ static_assert( sizeof( InitializePerformanceApiInfoINTEL ) == sizeof( VkInitializePerformanceApiInfoINTEL ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<InitializePerformanceApiInfoINTEL>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eInitializePerformanceApiInfoINTEL>
+ {
+ using Type = InitializePerformanceApiInfoINTEL;
+ };
+
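InitializePerformanceApiInfoINTEL sets up the VK_INTEL_performance_query API on a device. A minimal sketch, assuming a vk::Device `device` with the extension enabled:

    vk::InitializePerformanceApiInfoINTEL initInfo( nullptr );  // pUserData is optional
    device.initializePerformanceApiINTEL( initInfo );
    // ... configure, record and collect performance queries ...
    device.uninitializePerformanceApiINTEL();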
+ struct DisplayEventInfoEXT
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayEventInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DisplayEventInfoEXT(VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ = VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut) VULKAN_HPP_NOEXCEPT
+ : displayEvent( displayEvent_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR DisplayEventInfoEXT( DisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DisplayEventInfoEXT( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ DisplayEventInfoEXT & operator=( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const *>( &rhs );
return *this;
}
- GraphicsPipelineCreateInfo & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
+ DisplayEventInfoEXT & operator=( DisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- basePipelineIndex = basePipelineIndex_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DisplayEventInfoEXT ) );
return *this;
}
- operator VkGraphicsPipelineCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+ DisplayEventInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( this );
+ pNext = pNext_;
+ return *this;
}
- operator VkGraphicsPipelineCreateInfo &() VULKAN_HPP_NOEXCEPT
+ DisplayEventInfoEXT & setDisplayEvent( VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkGraphicsPipelineCreateInfo*>( this );
+ displayEvent = displayEvent_;
+ return *this;
}
- bool operator==( GraphicsPipelineCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+ operator VkDisplayEventInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkDisplayEventInfoEXT*>( this );
+ }
+
+ operator VkDisplayEventInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkDisplayEventInfoEXT*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DisplayEventInfoEXT const& ) const = default;
+#else
+ bool operator==( DisplayEventInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( flags == rhs.flags )
- && ( stageCount == rhs.stageCount )
- && ( pStages == rhs.pStages )
- && ( pVertexInputState == rhs.pVertexInputState )
- && ( pInputAssemblyState == rhs.pInputAssemblyState )
- && ( pTessellationState == rhs.pTessellationState )
- && ( pViewportState == rhs.pViewportState )
- && ( pRasterizationState == rhs.pRasterizationState )
- && ( pMultisampleState == rhs.pMultisampleState )
- && ( pDepthStencilState == rhs.pDepthStencilState )
- && ( pColorBlendState == rhs.pColorBlendState )
- && ( pDynamicState == rhs.pDynamicState )
- && ( layout == rhs.layout )
- && ( renderPass == rhs.renderPass )
- && ( subpass == rhs.subpass )
- && ( basePipelineHandle == rhs.basePipelineHandle )
- && ( basePipelineIndex == rhs.basePipelineIndex );
+ && ( displayEvent == rhs.displayEvent );
}
- bool operator!=( GraphicsPipelineCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( DisplayEventInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineCreateInfo;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayEventInfoEXT;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {};
- uint32_t stageCount = {};
- const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages = {};
- const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState = {};
- const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo* pInputAssemblyState = {};
- const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState = {};
- const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo* pViewportState = {};
- const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo* pRasterizationState = {};
- const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo* pMultisampleState = {};
- const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo* pDepthStencilState = {};
- const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo* pColorBlendState = {};
- const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo* pDynamicState = {};
- VULKAN_HPP_NAMESPACE::PipelineLayout layout = {};
- VULKAN_HPP_NAMESPACE::RenderPass renderPass = {};
- uint32_t subpass = {};
- VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {};
- int32_t basePipelineIndex = {};
+ VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent = VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut;
+
+ };
+ static_assert( sizeof( DisplayEventInfoEXT ) == sizeof( VkDisplayEventInfoEXT ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<DisplayEventInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eDisplayEventInfoEXT>
+ {
+ using Type = DisplayEventInfoEXT;
};
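DisplayEventInfoEXT selects the display event to associate with a fence via vkRegisterDisplayEventEXT (VK_EXT_display_control). A minimal sketch, assuming a vk::Device `device` and a vk::DisplayKHR `display`:

    vk::DisplayEventInfoEXT eventInfo( vk::DisplayEventTypeEXT::eFirstPixelOut );
    vk::Fence fence = device.registerDisplayEventEXT( display, eventInfo );
    // fence signals when the first pixel of the next image leaves the display engine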
- static_assert( sizeof( GraphicsPipelineCreateInfo ) == sizeof( VkGraphicsPipelineCreateInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<GraphicsPipelineCreateInfo>::value, "struct wrapper is not a standard layout!" );
struct XYColorEXT
{
- VULKAN_HPP_CONSTEXPR XYColorEXT( float x_ = {},
- float y_ = {} ) VULKAN_HPP_NOEXCEPT
- : x( x_ )
- , y( y_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR XYColorEXT(float x_ = {}, float y_ = {}) VULKAN_HPP_NOEXCEPT
+ : x( x_ ), y( y_ )
{}
+ VULKAN_HPP_CONSTEXPR XYColorEXT( XYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
XYColorEXT( VkXYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- XYColorEXT& operator=( VkXYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ XYColorEXT & operator=( VkXYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::XYColorEXT const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::XYColorEXT const *>( &rhs );
+ return *this;
+ }
+
+ XYColorEXT & operator=( XYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( XYColorEXT ) );
return *this;
}
@@ -35750,6 +46920,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkXYColorEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkXYColorEXT*>( this );
@@ -35760,6 +46931,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkXYColorEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( XYColorEXT const& ) const = default;
+#else
bool operator==( XYColorEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( x == rhs.x )
@@ -35770,48 +46945,45 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
float x = {};
float y = {};
+
};
static_assert( sizeof( XYColorEXT ) == sizeof( VkXYColorEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<XYColorEXT>::value, "struct wrapper is not a standard layout!" );
struct HdrMetadataEXT
{
- VULKAN_HPP_CONSTEXPR HdrMetadataEXT( VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed_ = {},
- VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen_ = {},
- VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue_ = {},
- VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint_ = {},
- float maxLuminance_ = {},
- float minLuminance_ = {},
- float maxContentLightLevel_ = {},
- float maxFrameAverageLightLevel_ = {} ) VULKAN_HPP_NOEXCEPT
- : displayPrimaryRed( displayPrimaryRed_ )
- , displayPrimaryGreen( displayPrimaryGreen_ )
- , displayPrimaryBlue( displayPrimaryBlue_ )
- , whitePoint( whitePoint_ )
- , maxLuminance( maxLuminance_ )
- , minLuminance( minLuminance_ )
- , maxContentLightLevel( maxContentLightLevel_ )
- , maxFrameAverageLightLevel( maxFrameAverageLightLevel_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHdrMetadataEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR HdrMetadataEXT(VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed_ = {}, VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen_ = {}, VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue_ = {}, VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint_ = {}, float maxLuminance_ = {}, float minLuminance_ = {}, float maxContentLightLevel_ = {}, float maxFrameAverageLightLevel_ = {}) VULKAN_HPP_NOEXCEPT
+ : displayPrimaryRed( displayPrimaryRed_ ), displayPrimaryGreen( displayPrimaryGreen_ ), displayPrimaryBlue( displayPrimaryBlue_ ), whitePoint( whitePoint_ ), maxLuminance( maxLuminance_ ), minLuminance( minLuminance_ ), maxContentLightLevel( maxContentLightLevel_ ), maxFrameAverageLightLevel( maxFrameAverageLightLevel_ )
{}
- VULKAN_HPP_NAMESPACE::HdrMetadataEXT & operator=( VULKAN_HPP_NAMESPACE::HdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::HdrMetadataEXT ) - offsetof( HdrMetadataEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR HdrMetadataEXT( HdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
HdrMetadataEXT( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ HdrMetadataEXT & operator=( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::HdrMetadataEXT const *>( &rhs );
+ return *this;
+ }
- HdrMetadataEXT& operator=( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ HdrMetadataEXT & operator=( HdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::HdrMetadataEXT const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( HdrMetadataEXT ) );
return *this;
}
@@ -35821,25 +46993,25 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- HdrMetadataEXT & setDisplayPrimaryRed( VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed_ ) VULKAN_HPP_NOEXCEPT
+ HdrMetadataEXT & setDisplayPrimaryRed( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryRed_ ) VULKAN_HPP_NOEXCEPT
{
displayPrimaryRed = displayPrimaryRed_;
return *this;
}
- HdrMetadataEXT & setDisplayPrimaryGreen( VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen_ ) VULKAN_HPP_NOEXCEPT
+ HdrMetadataEXT & setDisplayPrimaryGreen( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryGreen_ ) VULKAN_HPP_NOEXCEPT
{
displayPrimaryGreen = displayPrimaryGreen_;
return *this;
}
- HdrMetadataEXT & setDisplayPrimaryBlue( VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue_ ) VULKAN_HPP_NOEXCEPT
+ HdrMetadataEXT & setDisplayPrimaryBlue( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryBlue_ ) VULKAN_HPP_NOEXCEPT
{
displayPrimaryBlue = displayPrimaryBlue_;
return *this;
}
- HdrMetadataEXT & setWhitePoint( VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint_ ) VULKAN_HPP_NOEXCEPT
+ HdrMetadataEXT & setWhitePoint( VULKAN_HPP_NAMESPACE::XYColorEXT const & whitePoint_ ) VULKAN_HPP_NOEXCEPT
{
whitePoint = whitePoint_;
return *this;
@@ -35869,6 +47041,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkHdrMetadataEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkHdrMetadataEXT*>( this );
@@ -35879,6 +47052,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkHdrMetadataEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( HdrMetadataEXT const& ) const = default;
+#else
bool operator==( HdrMetadataEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -35897,6 +47074,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHdrMetadataEXT;
@@ -35909,5997 +47089,13613 @@ namespace VULKAN_HPP_NAMESPACE
float minLuminance = {};
float maxContentLightLevel = {};
float maxFrameAverageLightLevel = {};
+
};
static_assert( sizeof( HdrMetadataEXT ) == sizeof( VkHdrMetadataEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<HdrMetadataEXT>::value, "struct wrapper is not a standard layout!" );
- struct HeadlessSurfaceCreateInfoEXT
+ template <>
+ struct CppType<StructureType, StructureType::eHdrMetadataEXT>
{
- VULKAN_HPP_CONSTEXPR HeadlessSurfaceCreateInfoEXT( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ using Type = HdrMetadataEXT;
+ };
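// Editor's note: a hedged usage sketch, not part of the generated header. It shows how the
// reworked XYColorEXT / HdrMetadataEXT wrappers above are typically filled in and handed to
// the VK_EXT_hdr_metadata entry point; `device` and `swapchain` are assumed, pre-created
// handles (vk::Device, vk::SwapchainKHR on an HDR-capable surface):
//
//   vk::HdrMetadataEXT metadata( vk::XYColorEXT( 0.680f, 0.320f ),    // displayPrimaryRed
//                                vk::XYColorEXT( 0.265f, 0.690f ),    // displayPrimaryGreen
//                                vk::XYColorEXT( 0.150f, 0.060f ),    // displayPrimaryBlue
//                                vk::XYColorEXT( 0.3127f, 0.3290f ),  // whitePoint (D65)
//                                1000.0f, 0.001f, 1000.0f, 400.0f );
//   device.setHdrMetadataEXT( swapchain, metadata );  // single elements convert to ArrayProxy
//
// Note that the setters now take XYColorEXT by const reference, matching the generator change above.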
+
+ struct SemaphoreSignalInfo
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreSignalInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SemaphoreSignalInfo(VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, uint64_t value_ = {}) VULKAN_HPP_NOEXCEPT
+ : semaphore( semaphore_ ), value( value_ )
{}
- VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR SemaphoreSignalInfo( SemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ SemaphoreSignalInfo( VkSemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT ) - offsetof( HeadlessSurfaceCreateInfoEXT, pNext ) );
- return *this;
+ *this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- HeadlessSurfaceCreateInfoEXT( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ SemaphoreSignalInfo & operator=( VkSemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo const *>( &rhs );
+ return *this;
}
- HeadlessSurfaceCreateInfoEXT& operator=( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ SemaphoreSignalInfo & operator=( SemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( SemaphoreSignalInfo ) );
return *this;
}
- HeadlessSurfaceCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ SemaphoreSignalInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- HeadlessSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+ SemaphoreSignalInfo & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
{
- flags = flags_;
+ semaphore = semaphore_;
return *this;
}
- operator VkHeadlessSurfaceCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+ SemaphoreSignalInfo & setValue( uint64_t value_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT*>( this );
+ value = value_;
+ return *this;
}
- operator VkHeadlessSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+
+ operator VkSemaphoreSignalInfo const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkHeadlessSurfaceCreateInfoEXT*>( this );
+ return *reinterpret_cast<const VkSemaphoreSignalInfo*>( this );
}
- bool operator==( HeadlessSurfaceCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ operator VkSemaphoreSignalInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkSemaphoreSignalInfo*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SemaphoreSignalInfo const& ) const = default;
+#else
+ bool operator==( SemaphoreSignalInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( flags == rhs.flags );
+ && ( semaphore == rhs.semaphore )
+ && ( value == rhs.value );
}
- bool operator!=( HeadlessSurfaceCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( SemaphoreSignalInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHeadlessSurfaceCreateInfoEXT;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreSignalInfo;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags = {};
+ VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
+ uint64_t value = {};
+
};
- static_assert( sizeof( HeadlessSurfaceCreateInfoEXT ) == sizeof( VkHeadlessSurfaceCreateInfoEXT ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<HeadlessSurfaceCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( SemaphoreSignalInfo ) == sizeof( VkSemaphoreSignalInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<SemaphoreSignalInfo>::value, "struct wrapper is not a standard layout!" );
-#ifdef VK_USE_PLATFORM_IOS_MVK
+ template <>
+ struct CppType<StructureType, StructureType::eSemaphoreSignalInfo>
+ {
+ using Type = SemaphoreSignalInfo;
+ };
+ using SemaphoreSignalInfoKHR = SemaphoreSignalInfo;
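// Editor's note: hedged usage sketch, not part of the generated header. With a timeline
// semaphore (created via SemaphoreTypeCreateInfo), the SemaphoreSignalInfo wrapper above is
// passed to vk::Device::signalSemaphore (core in Vulkan 1.2; the KHR name is aliased above).
// `device` and `timelineSemaphore` are assumed, pre-created handles:
//
//   vk::SemaphoreSignalInfo signalInfo( timelineSemaphore, /*value*/ 2 );
//   device.signalSemaphore( signalInfo );   // host-side signal of the timeline to value 2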
- struct IOSSurfaceCreateInfoMVK
+ struct SemaphoreWaitInfo
{
- VULKAN_HPP_CONSTEXPR IOSSurfaceCreateInfoMVK( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags_ = {},
- const void* pView_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , pView( pView_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreWaitInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SemaphoreWaitInfo(VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_ = {}, uint32_t semaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore* pSemaphores_ = {}, const uint64_t* pValues_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), semaphoreCount( semaphoreCount_ ), pSemaphores( pSemaphores_ ), pValues( pValues_ )
{}
- VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & operator=( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR SemaphoreWaitInfo( SemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ SemaphoreWaitInfo( VkSemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK ) - offsetof( IOSSurfaceCreateInfoMVK, pNext ) );
- return *this;
+ *this = rhs;
}
- IOSSurfaceCreateInfoMVK( VkIOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ SemaphoreWaitInfo( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & semaphores_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & values_ = {} )
+ : flags( flags_ ), semaphoreCount( static_cast<uint32_t>( semaphores_.size() ) ), pSemaphores( semaphores_.data() ), pValues( values_.data() )
{
- *this = rhs;
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+ VULKAN_HPP_ASSERT( semaphores_.size() == values_.size() );
+#else
+ if ( semaphores_.size() != values_.size() )
+ {
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::SemaphoreWaitInfo::SemaphoreWaitInfo: semaphores_.size() != values_.size()" );
+ }
+#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- IOSSurfaceCreateInfoMVK& operator=( VkIOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
+ SemaphoreWaitInfo & operator=( VkSemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo const *>( &rhs );
return *this;
}
- IOSSurfaceCreateInfoMVK & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ SemaphoreWaitInfo & operator=( SemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( SemaphoreWaitInfo ) );
+ return *this;
+ }
+
+ SemaphoreWaitInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- IOSSurfaceCreateInfoMVK & setFlags( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags_ ) VULKAN_HPP_NOEXCEPT
+ SemaphoreWaitInfo & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- IOSSurfaceCreateInfoMVK & setPView( const void* pView_ ) VULKAN_HPP_NOEXCEPT
+ SemaphoreWaitInfo & setSemaphoreCount( uint32_t semaphoreCount_ ) VULKAN_HPP_NOEXCEPT
{
- pView = pView_;
+ semaphoreCount = semaphoreCount_;
return *this;
}
- operator VkIOSSurfaceCreateInfoMVK const&() const VULKAN_HPP_NOEXCEPT
+ SemaphoreWaitInfo & setPSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pSemaphores_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkIOSSurfaceCreateInfoMVK*>( this );
+ pSemaphores = pSemaphores_;
+ return *this;
}
- operator VkIOSSurfaceCreateInfoMVK &() VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ SemaphoreWaitInfo & setSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & semaphores_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkIOSSurfaceCreateInfoMVK*>( this );
+ semaphoreCount = static_cast<uint32_t>( semaphores_.size() );
+ pSemaphores = semaphores_.data();
+ return *this;
}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
- bool operator==( IOSSurfaceCreateInfoMVK const& rhs ) const VULKAN_HPP_NOEXCEPT
+ SemaphoreWaitInfo & setPValues( const uint64_t* pValues_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pValues = pValues_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ SemaphoreWaitInfo & setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & values_ ) VULKAN_HPP_NOEXCEPT
+ {
+ semaphoreCount = static_cast<uint32_t>( values_.size() );
+ pValues = values_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+ operator VkSemaphoreWaitInfo const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkSemaphoreWaitInfo*>( this );
+ }
+
+ operator VkSemaphoreWaitInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkSemaphoreWaitInfo*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SemaphoreWaitInfo const& ) const = default;
+#else
+ bool operator==( SemaphoreWaitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
- && ( pView == rhs.pView );
+ && ( semaphoreCount == rhs.semaphoreCount )
+ && ( pSemaphores == rhs.pSemaphores )
+ && ( pValues == rhs.pValues );
}
- bool operator!=( IOSSurfaceCreateInfoMVK const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( SemaphoreWaitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIosSurfaceCreateInfoMVK;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreWaitInfo;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags = {};
- const void* pView = {};
+ VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags = {};
+ uint32_t semaphoreCount = {};
+ const VULKAN_HPP_NAMESPACE::Semaphore* pSemaphores = {};
+ const uint64_t* pValues = {};
+
};
- static_assert( sizeof( IOSSurfaceCreateInfoMVK ) == sizeof( VkIOSSurfaceCreateInfoMVK ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<IOSSurfaceCreateInfoMVK>::value, "struct wrapper is not a standard layout!" );
-#endif /*VK_USE_PLATFORM_IOS_MVK*/
+ static_assert( sizeof( SemaphoreWaitInfo ) == sizeof( VkSemaphoreWaitInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<SemaphoreWaitInfo>::value, "struct wrapper is not a standard layout!" );
- struct ImageBlit
+ template <>
+ struct CppType<StructureType, StructureType::eSemaphoreWaitInfo>
{
- VULKAN_HPP_CONSTEXPR_14 ImageBlit( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {},
- std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const& srcOffsets_ = {},
- VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {},
- std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const& dstOffsets_ = {} ) VULKAN_HPP_NOEXCEPT
- : srcSubresource( srcSubresource_ )
- , srcOffsets{}
- , dstSubresource( dstSubresource_ )
- , dstOffsets{}
+ using Type = SemaphoreWaitInfo;
+ };
+ using SemaphoreWaitInfoKHR = SemaphoreWaitInfo;
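// Editor's note: hedged usage sketch, not part of the generated header. The enhanced-mode
// constructor added above takes ArrayProxyNoTemporaries spans and requires the semaphore and
// value arrays to have matching sizes (assert, or LogicError when exceptions are enabled).
// `device`, `timelineA` and `timelineB` are assumed, pre-created handles:
//
//   std::array<vk::Semaphore, 2> semaphores = { timelineA, timelineB };
//   std::array<uint64_t, 2>      values     = { 5, 7 };
//   vk::SemaphoreWaitInfo waitInfo( vk::SemaphoreWaitFlags(), semaphores, values );
//   vk::Result result = device.waitSemaphores( waitInfo, /*timeout in ns*/ UINT64_MAX );
//   // result is eSuccess once both timelines reach their values, or eTimeout otherwise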
+
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ class Device;
+ template <typename Dispatch> class UniqueHandleTraits<AccelerationStructureKHR, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+ using UniqueAccelerationStructureKHR = UniqueHandle<AccelerationStructureKHR, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+ using UniqueAccelerationStructureNV = UniqueHandle<AccelerationStructureKHR, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+ template <typename Dispatch> class UniqueHandleTraits<Buffer, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+ using UniqueBuffer = UniqueHandle<Buffer, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+ template <typename Dispatch> class UniqueHandleTraits<BufferView, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+ using UniqueBufferView = UniqueHandle<BufferView, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+ template <typename Dispatch> class UniqueHandleTraits<CommandBuffer, Dispatch> { public: using deleter = PoolFree<Device, CommandPool, Dispatch>; };
+ using UniqueCommandBuffer = UniqueHandle<CommandBuffer, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+ template <typename Dispatch> class UniqueHandleTraits<CommandPool, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+ using UniqueCommandPool = UniqueHandle<CommandPool, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template <typename Dispatch> class UniqueHandleTraits<DeferredOperationKHR, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+ using UniqueDeferredOperationKHR = UniqueHandle<DeferredOperationKHR, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ template <typename Dispatch> class UniqueHandleTraits<DescriptorPool, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+ using UniqueDescriptorPool = UniqueHandle<DescriptorPool, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+ template <typename Dispatch> class UniqueHandleTraits<DescriptorSet, Dispatch> { public: using deleter = PoolFree<Device, DescriptorPool, Dispatch>; };
+ using UniqueDescriptorSet = UniqueHandle<DescriptorSet, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+ template <typename Dispatch> class UniqueHandleTraits<DescriptorSetLayout, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+ using UniqueDescriptorSetLayout = UniqueHandle<DescriptorSetLayout, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+ template <typename Dispatch> class UniqueHandleTraits<DescriptorUpdateTemplate, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+ using UniqueDescriptorUpdateTemplate = UniqueHandle<DescriptorUpdateTemplate, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+ using UniqueDescriptorUpdateTemplateKHR = UniqueHandle<DescriptorUpdateTemplate, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+ template <typename Dispatch> class UniqueHandleTraits<DeviceMemory, Dispatch> { public: using deleter = ObjectFree<Device, Dispatch>; };
+ using UniqueDeviceMemory = UniqueHandle<DeviceMemory, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+ template <typename Dispatch> class UniqueHandleTraits<Event, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+ using UniqueEvent = UniqueHandle<Event, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+ template <typename Dispatch> class UniqueHandleTraits<Fence, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+ using UniqueFence = UniqueHandle<Fence, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+ template <typename Dispatch> class UniqueHandleTraits<Framebuffer, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+ using UniqueFramebuffer = UniqueHandle<Framebuffer, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+ template <typename Dispatch> class UniqueHandleTraits<Image, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+ using UniqueImage = UniqueHandle<Image, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+ template <typename Dispatch> class UniqueHandleTraits<ImageView, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+ using UniqueImageView = UniqueHandle<ImageView, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+ template <typename Dispatch> class UniqueHandleTraits<IndirectCommandsLayoutNV, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+ using UniqueIndirectCommandsLayoutNV = UniqueHandle<IndirectCommandsLayoutNV, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+ template <typename Dispatch> class UniqueHandleTraits<Pipeline, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+ using UniquePipeline = UniqueHandle<Pipeline, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+ template <typename Dispatch> class UniqueHandleTraits<PipelineCache, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+ using UniquePipelineCache = UniqueHandle<PipelineCache, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+ template <typename Dispatch> class UniqueHandleTraits<PipelineLayout, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+ using UniquePipelineLayout = UniqueHandle<PipelineLayout, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+ template <typename Dispatch> class UniqueHandleTraits<PrivateDataSlotEXT, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+ using UniquePrivateDataSlotEXT = UniqueHandle<PrivateDataSlotEXT, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+ template <typename Dispatch> class UniqueHandleTraits<QueryPool, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+ using UniqueQueryPool = UniqueHandle<QueryPool, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+ template <typename Dispatch> class UniqueHandleTraits<RenderPass, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+ using UniqueRenderPass = UniqueHandle<RenderPass, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+ template <typename Dispatch> class UniqueHandleTraits<Sampler, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+ using UniqueSampler = UniqueHandle<Sampler, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+ template <typename Dispatch> class UniqueHandleTraits<SamplerYcbcrConversion, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+ using UniqueSamplerYcbcrConversion = UniqueHandle<SamplerYcbcrConversion, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+ using UniqueSamplerYcbcrConversionKHR = UniqueHandle<SamplerYcbcrConversion, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+ template <typename Dispatch> class UniqueHandleTraits<Semaphore, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+ using UniqueSemaphore = UniqueHandle<Semaphore, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+ template <typename Dispatch> class UniqueHandleTraits<ShaderModule, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+ using UniqueShaderModule = UniqueHandle<ShaderModule, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+ template <typename Dispatch> class UniqueHandleTraits<SwapchainKHR, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+ using UniqueSwapchainKHR = UniqueHandle<SwapchainKHR, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+ template <typename Dispatch> class UniqueHandleTraits<ValidationCacheEXT, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+ using UniqueValidationCacheEXT = UniqueHandle<ValidationCacheEXT, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
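// Editor's note: hedged illustration, not part of the generated header. With smart handles
// enabled (VULKAN_HPP_NO_SMART_HANDLE not defined) and exceptions enabled, the "...Unique"
// factory functions on vk::Device return these RAII wrappers directly; `device` is an assumed,
// pre-created vk::Device:
//
//   vk::UniqueBuffer staging = device.createBufferUnique(
//       vk::BufferCreateInfo( {}, 64 * 1024, vk::BufferUsageFlagBits::eTransferSrc ) );
//   // staging.get() yields the vk::Buffer; the buffer is destroyed when `staging` leaves
//   // scope, using the deleter declared above (ObjectDestroy<Device, Dispatch>).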
+
+ class Device
+ {
+ public:
+ using CType = VkDevice;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDevice;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDevice;
+
+ public:
+ VULKAN_HPP_CONSTEXPR Device() VULKAN_HPP_NOEXCEPT
+ : m_device(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR Device( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_device(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT Device( VkDevice device ) VULKAN_HPP_NOEXCEPT
+ : m_device( device )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ Device & operator=(VkDevice device) VULKAN_HPP_NOEXCEPT
+ {
+ m_device = device;
+ return *this;
+ }
+#endif
+
+ Device & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<VULKAN_HPP_NAMESPACE::Offset3D,2,2>::copy( srcOffsets, srcOffsets_ );
- VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<VULKAN_HPP_NAMESPACE::Offset3D,2,2>::copy( dstOffsets, dstOffsets_ );
+ m_device = VK_NULL_HANDLE;
+ return *this;
}
- ImageBlit( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( Device const& ) const = default;
+#else
+ bool operator==( Device const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ return m_device == rhs.m_device;
}
- ImageBlit& operator=( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT
+ bool operator!=(Device const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageBlit const *>(&rhs);
- return *this;
+ return m_device != rhs.m_device;
}
- ImageBlit & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ ) VULKAN_HPP_NOEXCEPT
+ bool operator<(Device const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- srcSubresource = srcSubresource_;
+ return m_device < rhs.m_device;
+ }
+#endif
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD ResultValue<uint32_t> acquireNextImage2KHR( const AcquireNextImageInfoKHR & acquireInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence, uint32_t* pImageIndex, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD ResultValue<uint32_t> acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
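// Editor's note: hedged usage sketch for the declarations just above, not part of the header.
// In enhanced mode acquireNextImageKHR returns ResultValue<uint32_t> because success codes
// other than eSuccess (e.g. eSuboptimalKHR) still deliver a valid image index. `swapchain`
// and `imageAvailableSemaphore` are assumed, pre-created handles:
//
//   vk::ResultValue<uint32_t> acquired =
//       device.acquireNextImageKHR( swapchain, UINT64_MAX, imageAvailableSemaphore, nullptr );
//   uint32_t imageIndex = acquired.value;   // also inspect acquired.result for eSuboptimalKHR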
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL* pConfiguration, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>::type acquirePerformanceConfigurationINTEL( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR* pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type acquireProfilingLockKHR( const AcquireProfilingLockInfoKHR & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo* pAllocateInfo, VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator = std::allocator<CommandBuffer>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<CommandBuffer,Allocator>>::type allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Allocator = std::allocator<CommandBuffer>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, CommandBuffer>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<CommandBuffer,Allocator>>::type allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename Allocator = std::allocator<UniqueHandle<CommandBuffer, Dispatch>>>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<UniqueHandle<CommandBuffer,Dispatch>,Allocator>>::type allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename Allocator = std::allocator<UniqueHandle<CommandBuffer, Dispatch>>, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<CommandBuffer, Dispatch>>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<UniqueHandle<CommandBuffer,Dispatch>,Allocator>>::type allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
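// Editor's note: hedged usage sketch for the overloads just above, not part of the header.
// The enhanced-mode variants return the whole vector in one call (throwing on failure when
// exceptions are enabled); `commandPool` is an assumed, pre-created vk::CommandPool:
//
//   std::vector<vk::UniqueCommandBuffer> commandBuffers = device.allocateCommandBuffersUnique(
//       vk::CommandBufferAllocateInfo( commandPool, vk::CommandBufferLevel::ePrimary, 3 ) );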
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo* pAllocateInfo, VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator = std::allocator<DescriptorSet>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<DescriptorSet,Allocator>>::type allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Allocator = std::allocator<DescriptorSet>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, DescriptorSet>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<DescriptorSet,Allocator>>::type allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename Allocator = std::allocator<UniqueHandle<DescriptorSet, Dispatch>>>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<UniqueHandle<DescriptorSet,Dispatch>,Allocator>>::type allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename Allocator = std::allocator<UniqueHandle<DescriptorSet, Dispatch>>, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<DescriptorSet, Dispatch>>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<UniqueHandle<DescriptorSet,Dispatch>,Allocator>>::type allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo* pAllocateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DeviceMemory* pMemory, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceMemory>::type allocateMemory( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<DeviceMemory,Dispatch>>::type allocateMemoryUnique( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result bindAccelerationStructureMemoryKHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoKHR* pBindInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindAccelerationStructureMemoryKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoKHR> const &bindInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result bindAccelerationStructureMemoryNV( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoKHR* pBindInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindAccelerationStructureMemoryNV( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoKHR> const &bindInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result bindBufferMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo* pBindInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindBufferMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const &bindInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result bindBufferMemory2KHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo* pBindInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindBufferMemory2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const &bindInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result bindImageMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo* pBindInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindImageMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const &bindInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result bindImageMemory2KHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo* pBindInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindImageMemory2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const &bindInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result buildAccelerationStructureKHR( uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfos, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildOffsetInfoKHR* const * ppOffsetInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result buildAccelerationStructureKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const &infos, ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildOffsetInfoKHR* const > const &pOffsetInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR* pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result copyAccelerationStructureKHR( const CopyAccelerationStructureInfoKHR & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR* pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result copyAccelerationStructureToMemoryKHR( const CopyAccelerationStructureToMemoryInfoKHR & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR* pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result copyMemoryToAccelerationStructureKHR( const CopyMemoryToAccelerationStructureInfoKHR & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructure, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR>::type createAccelerationStructureKHR( const AccelerationStructureCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<AccelerationStructureKHR,Dispatch>>::type createAccelerationStructureKHRUnique( const AccelerationStructureCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ Result createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructure, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureNV>::type createAccelerationStructureNV( const AccelerationStructureCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ typename ResultValueType<UniqueHandle<AccelerationStructureNV,Dispatch>>::type createAccelerationStructureNVUnique( const AccelerationStructureCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Buffer* pBuffer, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Buffer>::type createBuffer( const BufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<Buffer,Dispatch>>::type createBufferUnique( const BufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::BufferView* pView, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferView>::type createBufferView( const BufferViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<BufferView,Dispatch>>::type createBufferViewUnique( const BufferViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::CommandPool* pCommandPool, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::CommandPool>::type createCommandPool( const CommandPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<CommandPool,Dispatch>>::type createCommandPoolUnique( const CommandPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline,Allocator>> createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const &createInfos, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Allocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline,Allocator>> createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const &createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD ResultValue<Pipeline> createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename Allocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>>
+ VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>> createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const &createInfos, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename Allocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>> createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const &createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<Pipeline,Dispatch>> createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
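// Editor's note: hedged usage sketch for the pipeline-creation overloads above, not part of
// the header. They return ResultValue rather than ResultValueType so that success codes other
// than eSuccess can be reported alongside the pipelines; callers should check .result as well
// as .value. `pipelineCache` and `computePipelineCreateInfo` are assumed, pre-built objects:
//
//   vk::ResultValue<vk::Pipeline> rv =
//       device.createComputePipeline( pipelineCache, computePipelineCreateInfo );
//   if ( rv.result == vk::Result::eSuccess ) { vk::Pipeline pipeline = rv.value; /* ... */ }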
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ Result createDeferredOperationKHR( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DeferredOperationKHR* pDeferredOperation, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::DeferredOperationKHR>::type createDeferredOperationKHR( Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ typename ResultValueType<UniqueHandle<DeferredOperationKHR,Dispatch>>::type createDeferredOperationKHRUnique( Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorPool* pDescriptorPool, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorPool>::type createDescriptorPool( const DescriptorPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<DescriptorPool,Dispatch>>::type createDescriptorPoolUnique( const DescriptorPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayout, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>::type createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<DescriptorSetLayout,Dispatch>>::type createDescriptorSetLayoutUnique( const DescriptorSetLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<DescriptorUpdateTemplate,Dispatch>>::type createDescriptorUpdateTemplateUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<DescriptorUpdateTemplate,Dispatch>>::type createDescriptorUpdateTemplateKHRUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Event* pEvent, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Event>::type createEvent( const EventCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<Event,Dispatch>>::type createEventUnique( const EventCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type createFence( const FenceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<Fence,Dispatch>>::type createFenceUnique( const FenceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
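// Illustrative usage of the createFence overloads declared above (a minimal sketch, not part
// of the header; assumes a valid vk::Device `device`, enhanced mode, exceptions enabled):
//   vk::UniqueFence fence = device.createFenceUnique(
//       vk::FenceCreateInfo{ vk::FenceCreateFlagBits::eSignaled } );
//   vk::Fence raw = device.createFence( vk::FenceCreateInfo{} );  // caller must destroy this handle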
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Framebuffer* pFramebuffer, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Framebuffer>::type createFramebuffer( const FramebufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<Framebuffer,Dispatch>>::type createFramebufferUnique( const FramebufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline,Allocator>> createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const &createInfos, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Allocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline,Allocator>> createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const &createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD ResultValue<Pipeline> createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename Allocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>>
+ VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>> createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const &createInfos, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename Allocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>> createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const &createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<Pipeline,Dispatch>> createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
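// Illustrative sketch (not part of the header) for the enhanced-mode pipeline creators above.
// Unlike most create functions these return a ResultValue carrying both the result code and
// the value, since pipeline creation can report a non-error code alongside the handles.
// Assuming a valid `device`, a vk::PipelineCache `cache` and a filled
// vk::GraphicsPipelineCreateInfo `info`:
//   auto rv = device.createGraphicsPipelineUnique( cache, info );
//   if ( rv.result == vk::Result::eSuccess )
//   {
//     vk::UniquePipeline pipeline = std::move( rv.value );
//   }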
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Image* pImage, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Image>::type createImage( const ImageCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<Image,Dispatch>>::type createImageUnique( const ImageCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ImageView* pView, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageView>::type createImageView( const ImageViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<ImageView,Dispatch>>::type createImageViewUnique( const ImageViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV* pIndirectCommandsLayout, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV>::type createIndirectCommandsLayoutNV( const IndirectCommandsLayoutCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<IndirectCommandsLayoutNV,Dispatch>>::type createIndirectCommandsLayoutNVUnique( const IndirectCommandsLayoutCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PipelineCache* pPipelineCache, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineCache>::type createPipelineCache( const PipelineCacheCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<PipelineCache,Dispatch>>::type createPipelineCacheUnique( const PipelineCacheCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PipelineLayout* pPipelineLayout, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineLayout>::type createPipelineLayout( const PipelineLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<PipelineLayout,Dispatch>>::type createPipelineLayoutUnique( const PipelineLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ Result createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT* pPrivateDataSlot, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT>::type createPrivateDataSlotEXT( const PrivateDataSlotCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ typename ResultValueType<UniqueHandle<PrivateDataSlotEXT,Dispatch>>::type createPrivateDataSlotEXTUnique( const PrivateDataSlotCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::QueryPool* pQueryPool, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::QueryPool>::type createQueryPool( const QueryPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<QueryPool,Dispatch>>::type createQueryPoolUnique( const QueryPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline,Allocator>> createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const &createInfos, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Allocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline,Allocator>> createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const &createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD ResultValue<Pipeline> createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename Allocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>>
+ VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>> createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const &createInfos, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename Allocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>> createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const &createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<Pipeline,Dispatch>> createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline,Allocator>> createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const &createInfos, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Allocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline,Allocator>> createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const &createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD ResultValue<Pipeline> createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename Allocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>>
+ VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>> createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const &createInfos, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename Allocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>> createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const &createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<Pipeline,Dispatch>> createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type createRenderPass( const RenderPassCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<RenderPass,Dispatch>>::type createRenderPassUnique( const RenderPassCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type createRenderPass2( const RenderPassCreateInfo2 & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<RenderPass,Dispatch>>::type createRenderPass2Unique( const RenderPassCreateInfo2 & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type createRenderPass2KHR( const RenderPassCreateInfo2 & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<RenderPass,Dispatch>>::type createRenderPass2KHRUnique( const RenderPassCreateInfo2 & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
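// Note, not part of the header: createRenderPass2KHR is the VK_KHR_create_renderpass2 entry
// point, and createRenderPass2 above is its Vulkan 1.2 core promotion; both consume the same
// vk::RenderPassCreateInfo2. A minimal sketch, assuming a filled `createInfo2`:
//   vk::UniqueRenderPass renderPass = device.createRenderPass2Unique( createInfo2 );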
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Sampler* pSampler, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Sampler>::type createSampler( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<Sampler,Dispatch>>::type createSamplerUnique( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<SamplerYcbcrConversion,Dispatch>>::type createSamplerYcbcrConversionUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<SamplerYcbcrConversion,Dispatch>>::type createSamplerYcbcrConversionKHRUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Semaphore* pSemaphore, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Semaphore>::type createSemaphore( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<Semaphore,Dispatch>>::type createSemaphoreUnique( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ShaderModule* pShaderModule, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::ShaderModule>::type createShaderModule( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<ShaderModule,Dispatch>>::type createShaderModuleUnique( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
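// Illustrative sketch (not part of the header) for the createShaderModule overloads above,
// assuming a valid `device` and a std::vector<uint32_t> `spirv` holding the SPIR-V words:
//   vk::UniqueShaderModule shaderModule = device.createShaderModuleUnique(
//       vk::ShaderModuleCreateInfo{ {}, spirv.size() * sizeof( uint32_t ), spirv.data() } );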
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createSharedSwapchainsKHR( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator = std::allocator<SwapchainKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<SwapchainKHR,Allocator>>::type createSharedSwapchainsKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const &createInfos, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Allocator = std::allocator<SwapchainKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, SwapchainKHR>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<SwapchainKHR,Allocator>>::type createSharedSwapchainsKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const &createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<SwapchainKHR>::type createSharedSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename Allocator = std::allocator<UniqueHandle<SwapchainKHR, Dispatch>>>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR,Dispatch>,Allocator>>::type createSharedSwapchainsKHRUnique( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const &createInfos, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename Allocator = std::allocator<UniqueHandle<SwapchainKHR, Dispatch>>, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<SwapchainKHR, Dispatch>>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR,Dispatch>,Allocator>>::type createSharedSwapchainsKHRUnique( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const &createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<SwapchainKHR,Dispatch>>::type createSharedSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchain, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type createSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<SwapchainKHR,Dispatch>>::type createSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
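// Illustrative sketch (not part of the header), assuming a filled vk::SwapchainCreateInfoKHR `info`:
//   vk::UniqueSwapchainKHR swapchain = device.createSwapchainKHRUnique( info );
//   vk::SwapchainKHR plain = device.createSwapchainKHR( info );  // non-smart variant; destroy manually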
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ Result createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ValidationCacheEXT* pValidationCache, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::ValidationCacheEXT>::type createValidationCacheEXT( const ValidationCacheCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ typename ResultValueType<UniqueHandle<ValidationCacheEXT,Dispatch>>::type createValidationCacheEXTUnique( const ValidationCacheCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT* pNameInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT* pTagInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
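// Note, not part of the header: each destroyX member is paired with a destroy(...) overload
// resolved by handle type; both dispatch to the same vkDestroy* entry point, so the two calls
// below are equivalent (assuming a valid `device` and `buffer`):
//   device.destroyBuffer( buffer );
//   device.destroy( buffer );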
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyEvent( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyEvent( VULKAN_HPP_NAMESPACE::Event event, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::Event event, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::Fence fence, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyImage( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyImage( VULKAN_HPP_NAMESPACE::Image image, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::Image image, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result waitIdle(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type waitIdle(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
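+
+    // Illustrative note (editorial comment, not generator output): with enhanced mode enabled
+    // (the default), waitIdle() returns ResultValueType<void>::type — void when exceptions are
+    // enabled, throwing on failure, or a Result when exceptions are disabled. With
+    // VULKAN_HPP_DISABLE_ENHANCED_MODE defined it returns a Result the caller must check:
+    //   vk::Result r = device.waitIdle();   // non-enhanced build, hypothetical `device`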
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ Result displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT* pDisplayPowerInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ typename ResultValueType<void>::type displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayPowerInfoEXT & displayPowerInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result flushMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange* pMemoryRanges, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type flushMappedMemoryRanges( ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const &memoryRanges, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const &commandBuffers, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const &commandBuffers, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
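+
+    // Illustrative usage sketch (editorial comment, not generator output): in enhanced mode an
+    // ArrayProxy parameter accepts a std::vector, std::array or braced initializer list, so
+    // (assuming a vk::Device `device`, pool `commandPool` and command buffer `cmd`):
+    //   device.freeCommandBuffers( commandPool, { cmd } );
+    //   device.free( commandPool, { cmd } );   // uniform overload, same effect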
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ Result freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ typename ResultValueType<void>::type freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const &descriptorSets, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ Result free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ typename ResultValueType<void>::type free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const &descriptorSets, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ DeviceAddress getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR* pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ DeviceAddress getAccelerationStructureAddressKHR( const AccelerationStructureDeviceAddressInfoKHR & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, size_t dataSize, void* pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, ArrayProxy<T> const &data, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename T, typename Allocator = std::allocator<T>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<T,Allocator>>::type getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, size_t dataSize, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<T>::type getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getAccelerationStructureMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoKHR* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 getAccelerationStructureMemoryRequirementsKHR( const AccelerationStructureMemoryRequirementsInfoKHR & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ StructureChain<X, Y, Z...> getAccelerationStructureMemoryRequirementsKHR( const AccelerationStructureMemoryRequirementsInfoKHR & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR* pMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ StructureChain<X, Y, Z...> getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer* buffer, VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>::type getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<StructureChain<X, Y, Z...>>::type getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ DeviceAddress getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ DeviceAddress getBufferAddress( const BufferDeviceAddressInfo & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ DeviceAddress getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ DeviceAddress getBufferAddressEXT( const BufferDeviceAddressInfo & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ DeviceAddress getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ DeviceAddress getBufferAddressKHR( const BufferDeviceAddressInfo & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::MemoryRequirements* pMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NAMESPACE::MemoryRequirements getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ StructureChain<X, Y, Z...> getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
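+
+    // Illustrative usage sketch (editorial comment, not generator output): the StructureChain
+    // overload queries extension structures in one call, e.g. dedicated-allocation requirements
+    // (assuming a vk::Device `device` and a filled BufferMemoryRequirementsInfo2 `info`):
+    //   auto chain = device.getBufferMemoryRequirements2< vk::MemoryRequirements2,
+    //                                                     vk::MemoryDedicatedRequirements >( info );
+    //   vk::MemoryRequirements2 const & reqs = chain.get<vk::MemoryRequirements2>();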
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ StructureChain<X, Y, Z...> getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ uint64_t getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ uint64_t getBufferOpaqueCaptureAddress( const BufferDeviceAddressInfo & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ uint64_t getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ uint64_t getBufferOpaqueCaptureAddressKHR( const BufferDeviceAddressInfo & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getCalibratedTimestampsEXT( uint32_t timestampCount, const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<uint64_t>::type getCalibratedTimestampsEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const &timestampInfos, ArrayProxy<uint64_t> const &timestamps, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ uint32_t getDeferredOperationMaxConcurrencyKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ uint32_t getDeferredOperationMaxConcurrencyKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport* pSupport, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ StructureChain<X, Y, Z...> getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport* pSupport, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ StructureChain<X, Y, Z...> getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ Result getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionKHR* version, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ typename ResultValueType<void>::type getAccelerationStructureCompatibilityKHR( const AccelerationStructureVersionKHR & version, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getGroupPresentCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::type getGroupPresentCapabilitiesKHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type getGroupSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize* pCommittedMemoryInBytes, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NAMESPACE::DeviceSize getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ uint64_t getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo* pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ uint64_t getMemoryOpaqueCaptureAddress( const DeviceMemoryOpaqueCaptureAddressInfo & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ uint64_t getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo* pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ uint64_t getMemoryOpaqueCaptureAddressKHR( const DeviceMemoryOpaqueCaptureAddressInfo & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ PFN_vkVoidFunction getProcAddr( const char* pName, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ PFN_vkVoidFunction getProcAddr( const std::string & name, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, VULKAN_HPP_NAMESPACE::Queue* pQueue, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NAMESPACE::Queue getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
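+
+  // Illustrative use of the enhanced-mode overload above (comment only, not part of the
+  // generated header); assumes a valid device and a hypothetical graphicsQueueFamilyIndex:
+  //   vk::Queue graphicsQueue = device.getQueue( graphicsQueueFamilyIndex, 0 );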
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2* pQueueInfo, VULKAN_HPP_NAMESPACE::Queue* pQueue, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NAMESPACE::Queue getQueue2( const DeviceQueueInfo2 & queueInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<int>::type getFenceFdKHR( const FenceGetFdInfoKHR & getFdInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
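+
+  // Illustrative only (not part of the generated header): in enhanced mode the status comes
+  // back as a vk::Result, e.g.
+  //   if ( device.getFenceStatus( fence ) == vk::Result::eSuccess ) { /* fence is signalled */ }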
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<HANDLE>::type getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 getGeneratedCommandsMemoryRequirementsNV( const GeneratedCommandsMemoryRequirementsInfoNV & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ StructureChain<X, Y, Z...> getGeneratedCommandsMemoryRequirementsNV( const GeneratedCommandsMemoryRequirementsInfoNV & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ Result getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT>::type getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::MemoryRequirements* pMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NAMESPACE::MemoryRequirements getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
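+
+  // Illustrative only (not part of the generated header); assumes a previously created image:
+  //   vk::MemoryRequirements reqs = device.getImageMemoryRequirements( image );
+  //   // reqs.size, reqs.alignment and reqs.memoryTypeBits feed the subsequent allocateMemory call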
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ StructureChain<X, Y, Z...> getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ StructureChain<X, Y, Z...> getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements* pSparseMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator = std::allocator<SparseImageMemoryRequirements>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ std::vector<SparseImageMemoryRequirements,Allocator> getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Allocator = std::allocator<SparseImageMemoryRequirements>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, SparseImageMemoryRequirements>::value, int>::type = 0>
+ std::vector<SparseImageMemoryRequirements,Allocator> getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator = std::allocator<SparseImageMemoryRequirements2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ std::vector<SparseImageMemoryRequirements2,Allocator> getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Allocator = std::allocator<SparseImageMemoryRequirements2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, SparseImageMemoryRequirements2>::value, int>::type = 0>
+ std::vector<SparseImageMemoryRequirements2,Allocator> getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator = std::allocator<SparseImageMemoryRequirements2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ std::vector<SparseImageMemoryRequirements2,Allocator> getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Allocator = std::allocator<SparseImageMemoryRequirements2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, SparseImageMemoryRequirements2>::value, int>::type = 0>
+ std::vector<SparseImageMemoryRequirements2,Allocator> getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource* pSubresource, VULKAN_HPP_NAMESPACE::SubresourceLayout* pLayout, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NAMESPACE::SubresourceLayout getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, const ImageSubresource & subresource, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX>::type getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ uint32_t getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX* pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ uint32_t getImageViewHandleNVX( const ImageViewHandleInfoNVX & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<struct AHardwareBuffer*>::type getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<int>::type getMemoryFdKHR( const MemoryGetFdInfoKHR & getFdInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR* pMemoryFdProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>::type getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT>::type getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<HANDLE>::type getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<HANDLE>::type getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>::type getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE* pPresentationTimings, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator = std::allocator<PastPresentationTimingGOOGLE>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PastPresentationTimingGOOGLE,Allocator>>::type getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Allocator = std::allocator<PastPresentationTimingGOOGLE>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, PastPresentationTimingGOOGLE>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PastPresentationTimingGOOGLE,Allocator>>::type getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, VULKAN_HPP_NAMESPACE::PerformanceValueINTEL* pValue, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL>::type getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, size_t* pDataSize, void* pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t,Allocator>>::type getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Allocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, uint8_t>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t,Allocator>>::type getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
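+
+  // Illustrative only (not part of the generated header): the enhanced-mode overload returns the
+  // cache blob as a byte vector, which can be persisted and fed back through
+  // vk::PipelineCacheCreateInfo on a later run:
+  //   std::vector<uint8_t> blob = device.getPipelineCacheData( pipelineCache );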
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR* pInternalRepresentations, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator = std::allocator<PipelineExecutableInternalRepresentationKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PipelineExecutableInternalRepresentationKHR,Allocator>>::type getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Allocator = std::allocator<PipelineExecutableInternalRepresentationKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, PipelineExecutableInternalRepresentationKHR>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PipelineExecutableInternalRepresentationKHR,Allocator>>::type getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator = std::allocator<PipelineExecutablePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PipelineExecutablePropertiesKHR,Allocator>>::type getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Allocator = std::allocator<PipelineExecutablePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, PipelineExecutablePropertiesKHR>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PipelineExecutablePropertiesKHR,Allocator>>::type getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR* pStatistics, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator = std::allocator<PipelineExecutableStatisticKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PipelineExecutableStatisticKHR,Allocator>>::type getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Allocator = std::allocator<PipelineExecutableStatisticKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, PipelineExecutableStatisticKHR>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PipelineExecutableStatisticKHR,Allocator>>::type getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, uint64_t* pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ uint64_t getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, ArrayProxy<T> const &data, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename T, typename Allocator = std::allocator<T>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD ResultValue<std::vector<T,Allocator>> getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD ResultValue<T> getQueryPoolResult( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
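+
+  // Illustrative only (not part of the generated header); assumes a timestamp query pool with two
+  // queries already written by a submitted command buffer:
+  //   std::array<uint64_t, 2> timestamps{};
+  //   vk::Result status = device.getQueryPoolResults<uint64_t>( queryPool, 0, 2, timestamps,
+  //                                                             sizeof( uint64_t ), vk::QueryResultFlagBits::e64 );
+  //   // status is eSuccess once the results are available, eNotReady otherwise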
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy<T> const &data, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename T, typename Allocator = std::allocator<T>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<T,Allocator>>::type getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<T>::type getRayTracingCaptureReplayShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy<T> const &data, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename T, typename Allocator = std::allocator<T>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<T,Allocator>>::type getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<T>::type getRayTracingShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy<T> const &data, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename T, typename Allocator = std::allocator<T>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<T,Allocator>>::type getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<T>::type getRayTracingShaderGroupHandleNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE* pDisplayTimingProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>::type getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, VULKAN_HPP_NAMESPACE::Extent2D* pGranularity, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NAMESPACE::Extent2D getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t* pValue, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<uint64_t>::type getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
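+
+  // Illustrative only (not part of the generated header); assumes a timeline semaphore:
+  //   uint64_t counter = device.getSemaphoreCounterValue( timelineSemaphore );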
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t* pValue, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<uint64_t>::type getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<int>::type getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<HANDLE>::type getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t,Allocator>>::type getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Allocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, uint8_t>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t,Allocator>>::type getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<uint64_t>::type getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VULKAN_HPP_NAMESPACE::Image* pSwapchainImages, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator = std::allocator<Image>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<Image,Allocator>>::type getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Allocator = std::allocator<Image>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, Image>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<Image,Allocator>>::type getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
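+
+  // Illustrative only (not part of the generated header): with exceptions enabled the
+  // enhanced-mode overload returns the images directly and throws vk::SystemError on failure:
+  //   std::vector<vk::Image> swapchainImages = device.getSwapchainImagesKHR( swapchain );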
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, size_t* pDataSize, void* pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t,Allocator>>::type getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Allocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, uint8_t>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t,Allocator>>::type getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR* pImportFenceFdInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL* pInitializeInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type initializePerformanceApiINTEL( const InitializePerformanceApiInfoINTEL & initializeInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result invalidateMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange* pMemoryRanges, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type invalidateMappedMemoryRanges( ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const &memoryRanges, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, void** ppData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void*>::type mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
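+
+  // Illustrative only (not part of the generated header); assumes host-visible, host-coherent
+  // memory and a source buffer `src` of `size` bytes:
+  //   void * mapped = device.mapMemory( memory, 0, size );
+  //   std::memcpy( mapped, src, static_cast<size_t>( size ) );
+  //   device.unmapMemory( memory );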
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::PipelineCache* pSrcCaches, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::PipelineCache> const &srcCaches, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::ValidationCacheEXT* pSrcCaches, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ValidationCacheEXT> const &srcCaches, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ Result registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT* pDeviceEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type registerEventEXT( const DeviceEventInfoEXT & deviceEventInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ typename ResultValueType<UniqueHandle<Fence,Dispatch>>::type registerEventEXTUnique( const DeviceEventInfoEXT & deviceEventInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ Result registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT* pDisplayEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ typename ResultValueType<UniqueHandle<Fence,Dispatch>>::type registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void releaseProfilingLockKHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void releaseProfilingLockKHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ Result resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ typename ResultValueType<void>::type resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ Result resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ typename ResultValueType<void>::type resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ Result resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ typename ResultValueType<void>::type resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ Result resetFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence* pFences, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ typename ResultValueType<void>::type resetFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const &fences, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pNameInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT* pTagInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setHdrMetadataEXT( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains, const VULKAN_HPP_NAMESPACE::HdrMetadataEXT* pMetadata, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setHdrMetadataEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const &swapchains, ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> const &metadata, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ Result setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, uint64_t data, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ typename ResultValueType<void>::type setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, uint64_t data, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo* pSignalInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type signalSemaphore( const SemaphoreSignalInfo & signalInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo* pSignalInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type signalSemaphoreKHR( const SemaphoreSignalInfo & signalInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void uninitializePerformanceApiINTEL(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void uninitializePerformanceApiINTEL(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void updateDescriptorSets( uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VULKAN_HPP_NAMESPACE::CopyDescriptorSet* pDescriptorCopies, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void updateDescriptorSets( ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const &descriptorWrites, ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> const &descriptorCopies, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result waitForFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence* pFences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result waitForFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const &fences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo* pWaitInfo, uint64_t timeout, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result waitSemaphores( const SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo* pWaitInfo, uint64_t timeout, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result waitSemaphoresKHR( const SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t dataSize, void* pData, size_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type writeAccelerationStructuresPropertiesKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const &accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, ArrayProxy<T> const &data, size_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
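// Illustrative sketch (editor's example, not part of vulkan.hpp or of this patch):
// how the enhanced-mode overloads declared above (waitForFences / resetFences taking
// an ArrayProxy) are typically called. The function and variable names are invented;
// the default `vk` namespace, the default dispatcher, and enabled exceptions are
// assumed. waitForFences() returns a vk::Result because eTimeout is a success code,
// while resetFences() returns void and throws vk::SystemError on failure.
#include <array>
#include <cstdint>
#include <vulkan/vulkan.hpp>

void waitThenResetFences( vk::Device device, vk::Fence f0, vk::Fence f1 )
{
  std::array<vk::Fence, 2> fences = { f0, f1 };   // ArrayProxy accepts std::array
  vk::Result result = device.waitForFences( fences, VK_TRUE, UINT64_MAX );
  if ( result == vk::Result::eSuccess )
  {
    device.resetFences( fences );
  }
}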
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDevice() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_device;
+ }
+
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_device != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_device == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkDevice m_device;
+ };
+ static_assert( sizeof( VULKAN_HPP_NAMESPACE::Device ) == sizeof( VkDevice ), "handle and wrapper have different size!" );
+
+ template <>
+ struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eDevice>
+ {
+ using type = VULKAN_HPP_NAMESPACE::Device;
+ };
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDevice>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::Device;
+ };
+
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDevice>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::Device;
+ };
+
+
+ template <>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Device>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+ };
+
+ struct DisplayModeParametersKHR
+ {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DisplayModeParametersKHR(VULKAN_HPP_NAMESPACE::Extent2D visibleRegion_ = {}, uint32_t refreshRate_ = {}) VULKAN_HPP_NOEXCEPT
+ : visibleRegion( visibleRegion_ ), refreshRate( refreshRate_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR DisplayModeParametersKHR( DisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DisplayModeParametersKHR( VkDisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ DisplayModeParametersKHR & operator=( VkDisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const *>( &rhs );
return *this;
}
- ImageBlit & setSrcOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> srcOffsets_ ) VULKAN_HPP_NOEXCEPT
+ DisplayModeParametersKHR & operator=( DisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( srcOffsets, srcOffsets_.data(), 2 * sizeof( VULKAN_HPP_NAMESPACE::Offset3D ) );
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DisplayModeParametersKHR ) );
return *this;
}
- ImageBlit & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ ) VULKAN_HPP_NOEXCEPT
+ DisplayModeParametersKHR & setVisibleRegion( VULKAN_HPP_NAMESPACE::Extent2D const & visibleRegion_ ) VULKAN_HPP_NOEXCEPT
{
- dstSubresource = dstSubresource_;
+ visibleRegion = visibleRegion_;
return *this;
}
- ImageBlit & setDstOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> dstOffsets_ ) VULKAN_HPP_NOEXCEPT
+ DisplayModeParametersKHR & setRefreshRate( uint32_t refreshRate_ ) VULKAN_HPP_NOEXCEPT
{
- memcpy( dstOffsets, dstOffsets_.data(), 2 * sizeof( VULKAN_HPP_NAMESPACE::Offset3D ) );
+ refreshRate = refreshRate_;
return *this;
}
- operator VkImageBlit const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkDisplayModeParametersKHR const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkImageBlit*>( this );
+ return *reinterpret_cast<const VkDisplayModeParametersKHR*>( this );
}
- operator VkImageBlit &() VULKAN_HPP_NOEXCEPT
+ operator VkDisplayModeParametersKHR &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkImageBlit*>( this );
+ return *reinterpret_cast<VkDisplayModeParametersKHR*>( this );
}
- bool operator==( ImageBlit const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DisplayModeParametersKHR const& ) const = default;
+#else
+ bool operator==( DisplayModeParametersKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- return ( srcSubresource == rhs.srcSubresource )
- && ( memcmp( srcOffsets, rhs.srcOffsets, 2 * sizeof( VULKAN_HPP_NAMESPACE::Offset3D ) ) == 0 )
- && ( dstSubresource == rhs.dstSubresource )
- && ( memcmp( dstOffsets, rhs.dstOffsets, 2 * sizeof( VULKAN_HPP_NAMESPACE::Offset3D ) ) == 0 );
+ return ( visibleRegion == rhs.visibleRegion )
+ && ( refreshRate == rhs.refreshRate );
}
- bool operator!=( ImageBlit const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( DisplayModeParametersKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
- VULKAN_HPP_NAMESPACE::Offset3D srcOffsets[2] = {};
- VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
- VULKAN_HPP_NAMESPACE::Offset3D dstOffsets[2] = {};
+ VULKAN_HPP_NAMESPACE::Extent2D visibleRegion = {};
+ uint32_t refreshRate = {};
+
};
- static_assert( sizeof( ImageBlit ) == sizeof( VkImageBlit ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ImageBlit>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( DisplayModeParametersKHR ) == sizeof( VkDisplayModeParametersKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<DisplayModeParametersKHR>::value, "struct wrapper is not a standard layout!" );
- struct ImageCopy
+ struct DisplayModeCreateInfoKHR
{
- VULKAN_HPP_CONSTEXPR ImageCopy( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {},
- VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {},
- VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {},
- VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {},
- VULKAN_HPP_NAMESPACE::Extent3D extent_ = {} ) VULKAN_HPP_NOEXCEPT
- : srcSubresource( srcSubresource_ )
- , srcOffset( srcOffset_ )
- , dstSubresource( dstSubresource_ )
- , dstOffset( dstOffset_ )
- , extent( extent_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayModeCreateInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DisplayModeCreateInfoKHR(VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), parameters( parameters_ )
{}
- ImageCopy( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR DisplayModeCreateInfoKHR( DisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DisplayModeCreateInfoKHR( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ImageCopy& operator=( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
+ DisplayModeCreateInfoKHR & operator=( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageCopy const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const *>( &rhs );
return *this;
}
- ImageCopy & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ ) VULKAN_HPP_NOEXCEPT
+ DisplayModeCreateInfoKHR & operator=( DisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- srcSubresource = srcSubresource_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DisplayModeCreateInfoKHR ) );
return *this;
}
- ImageCopy & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ ) VULKAN_HPP_NOEXCEPT
+ DisplayModeCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- srcOffset = srcOffset_;
+ pNext = pNext_;
return *this;
}
- ImageCopy & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ ) VULKAN_HPP_NOEXCEPT
+ DisplayModeCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
- dstSubresource = dstSubresource_;
+ flags = flags_;
return *this;
}
- ImageCopy & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ ) VULKAN_HPP_NOEXCEPT
+ DisplayModeCreateInfoKHR & setParameters( VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const & parameters_ ) VULKAN_HPP_NOEXCEPT
{
- dstOffset = dstOffset_;
+ parameters = parameters_;
return *this;
}
- ImageCopy & setExtent( VULKAN_HPP_NAMESPACE::Extent3D extent_ ) VULKAN_HPP_NOEXCEPT
- {
- extent = extent_;
- return *this;
- }
- operator VkImageCopy const&() const VULKAN_HPP_NOEXCEPT
+ operator VkDisplayModeCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkImageCopy*>( this );
+ return *reinterpret_cast<const VkDisplayModeCreateInfoKHR*>( this );
}
- operator VkImageCopy &() VULKAN_HPP_NOEXCEPT
+ operator VkDisplayModeCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkImageCopy*>( this );
+ return *reinterpret_cast<VkDisplayModeCreateInfoKHR*>( this );
}
- bool operator==( ImageCopy const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DisplayModeCreateInfoKHR const& ) const = default;
+#else
+ bool operator==( DisplayModeCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- return ( srcSubresource == rhs.srcSubresource )
- && ( srcOffset == rhs.srcOffset )
- && ( dstSubresource == rhs.dstSubresource )
- && ( dstOffset == rhs.dstOffset )
- && ( extent == rhs.extent );
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( parameters == rhs.parameters );
}
- bool operator!=( ImageCopy const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( DisplayModeCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
- VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {};
- VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
- VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {};
- VULKAN_HPP_NAMESPACE::Extent3D extent = {};
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayModeCreateInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags = {};
+ VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters = {};
+
};
- static_assert( sizeof( ImageCopy ) == sizeof( VkImageCopy ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ImageCopy>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( DisplayModeCreateInfoKHR ) == sizeof( VkDisplayModeCreateInfoKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<DisplayModeCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
- struct ImageCreateInfo
+ template <>
+ struct CppType<StructureType, StructureType::eDisplayModeCreateInfoKHR>
{
- VULKAN_HPP_CONSTEXPR ImageCreateInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {},
- VULKAN_HPP_NAMESPACE::ImageType imageType_ = VULKAN_HPP_NAMESPACE::ImageType::e1D,
- VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
- VULKAN_HPP_NAMESPACE::Extent3D extent_ = {},
- uint32_t mipLevels_ = {},
- uint32_t arrayLayers_ = {},
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
- VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal,
- VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {},
- VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive,
- uint32_t queueFamilyIndexCount_ = {},
- const uint32_t* pQueueFamilyIndices_ = {},
- VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , imageType( imageType_ )
- , format( format_ )
- , extent( extent_ )
- , mipLevels( mipLevels_ )
- , arrayLayers( arrayLayers_ )
- , samples( samples_ )
- , tiling( tiling_ )
- , usage( usage_ )
- , sharingMode( sharingMode_ )
- , queueFamilyIndexCount( queueFamilyIndexCount_ )
- , pQueueFamilyIndices( pQueueFamilyIndices_ )
- , initialLayout( initialLayout_ )
- {}
-
- VULKAN_HPP_NAMESPACE::ImageCreateInfo & operator=( VULKAN_HPP_NAMESPACE::ImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageCreateInfo ) - offsetof( ImageCreateInfo, pNext ) );
+ using Type = DisplayModeCreateInfoKHR;
+ };
+
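// Illustrative sketch (editor's example, not part of vulkan.hpp or of this patch):
// the setters declared above each return *this, so the display-mode structures can
// be filled fluently. The resolution and refresh rate are invented for the example
// (refreshRate is expressed in millihertz per VK_KHR_display); the default `vk`
// namespace and enabled struct constructors are assumed.
#include <vulkan/vulkan.hpp>

vk::DisplayModeCreateInfoKHR makeExampleDisplayModeCreateInfo()
{
  vk::DisplayModeParametersKHR parameters = vk::DisplayModeParametersKHR()
      .setVisibleRegion( vk::Extent2D( 1920, 1080 ) )
      .setRefreshRate( 60000 );   // 60.000 Hz in millihertz

  return vk::DisplayModeCreateInfoKHR()
      .setParameters( parameters );   // sType and pNext keep their defaults
}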
+ class DisplayModeKHR
+ {
+ public:
+ using CType = VkDisplayModeKHR;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDisplayModeKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayModeKHR;
+
+ public:
+ VULKAN_HPP_CONSTEXPR DisplayModeKHR() VULKAN_HPP_NOEXCEPT
+ : m_displayModeKHR(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR DisplayModeKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_displayModeKHR(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT DisplayModeKHR( VkDisplayModeKHR displayModeKHR ) VULKAN_HPP_NOEXCEPT
+ : m_displayModeKHR( displayModeKHR )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ DisplayModeKHR & operator=(VkDisplayModeKHR displayModeKHR) VULKAN_HPP_NOEXCEPT
+ {
+ m_displayModeKHR = displayModeKHR;
return *this;
}
+#endif
- ImageCreateInfo( VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ DisplayModeKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ {
+ m_displayModeKHR = VK_NULL_HANDLE;
+ return *this;
+ }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DisplayModeKHR const& ) const = default;
+#else
+ bool operator==( DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_displayModeKHR == rhs.m_displayModeKHR;
+ }
+
+ bool operator!=(DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_displayModeKHR != rhs.m_displayModeKHR;
+ }
+
+ bool operator<(DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_displayModeKHR < rhs.m_displayModeKHR;
+ }
+#endif
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDisplayModeKHR() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_displayModeKHR;
+ }
+
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_displayModeKHR != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_displayModeKHR == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkDisplayModeKHR m_displayModeKHR;
+ };
+ static_assert( sizeof( VULKAN_HPP_NAMESPACE::DisplayModeKHR ) == sizeof( VkDisplayModeKHR ), "handle and wrapper have different size!" );
+
+ template <>
+ struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eDisplayModeKHR>
+ {
+ using type = VULKAN_HPP_NAMESPACE::DisplayModeKHR;
+ };
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDisplayModeKHR>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::DisplayModeKHR;
+ };
+
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayModeKHR>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::DisplayModeKHR;
+ };
+
+
+ template <>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DisplayModeKHR>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+ };
+
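// Illustrative sketch (editor's example, not part of vulkan.hpp or of this patch):
// like vk::Device above, vk::DisplayModeKHR is a thin value wrapper around the C
// handle, so it can be null-checked via its explicit operator bool and unwrapped
// for C APIs with a static_cast. The function and parameter names are invented.
#include <vulkan/vulkan.hpp>

void handOffToCApi( vk::DisplayModeKHR mode )
{
  if ( mode )   // true when the underlying handle is not VK_NULL_HANDLE
  {
    VkDisplayModeKHR raw = static_cast<VkDisplayModeKHR>( mode );
    (void)raw;  // e.g. pass `raw` to a C function expecting a VkDisplayModeKHR
  }
}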
+ struct ExtensionProperties
+ {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 ExtensionProperties(std::array<char,VK_MAX_EXTENSION_NAME_SIZE> const& extensionName_ = {}, uint32_t specVersion_ = {}) VULKAN_HPP_NOEXCEPT
+ : extensionName( extensionName_ ), specVersion( specVersion_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR_14 ExtensionProperties( ExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ExtensionProperties( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ImageCreateInfo& operator=( VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ ExtensionProperties & operator=( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageCreateInfo const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExtensionProperties const *>( &rhs );
return *this;
}
- ImageCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ ExtensionProperties & operator=( ExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ExtensionProperties ) );
return *this;
}
- ImageCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+
+ operator VkExtensionProperties const&() const VULKAN_HPP_NOEXCEPT
{
- flags = flags_;
- return *this;
+ return *reinterpret_cast<const VkExtensionProperties*>( this );
}
- ImageCreateInfo & setImageType( VULKAN_HPP_NAMESPACE::ImageType imageType_ ) VULKAN_HPP_NOEXCEPT
+ operator VkExtensionProperties &() VULKAN_HPP_NOEXCEPT
{
- imageType = imageType_;
- return *this;
+ return *reinterpret_cast<VkExtensionProperties*>( this );
}
- ImageCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ExtensionProperties const& ) const = default;
+#else
+ bool operator==( ExtensionProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- format = format_;
- return *this;
+ return ( extensionName == rhs.extensionName )
+ && ( specVersion == rhs.specVersion );
}
- ImageCreateInfo & setExtent( VULKAN_HPP_NAMESPACE::Extent3D extent_ ) VULKAN_HPP_NOEXCEPT
+ bool operator!=( ExtensionProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- extent = extent_;
- return *this;
+ return !operator==( rhs );
}
+#endif
- ImageCreateInfo & setMipLevels( uint32_t mipLevels_ ) VULKAN_HPP_NOEXCEPT
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> extensionName = {};
+ uint32_t specVersion = {};
+
+ };
+ static_assert( sizeof( ExtensionProperties ) == sizeof( VkExtensionProperties ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ExtensionProperties>::value, "struct wrapper is not a standard layout!" );
+
+ struct LayerProperties
+ {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 LayerProperties(std::array<char,VK_MAX_EXTENSION_NAME_SIZE> const& layerName_ = {}, uint32_t specVersion_ = {}, uint32_t implementationVersion_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const& description_ = {}) VULKAN_HPP_NOEXCEPT
+ : layerName( layerName_ ), specVersion( specVersion_ ), implementationVersion( implementationVersion_ ), description( description_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR_14 LayerProperties( LayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ LayerProperties( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- mipLevels = mipLevels_;
- return *this;
+ *this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ImageCreateInfo & setArrayLayers( uint32_t arrayLayers_ ) VULKAN_HPP_NOEXCEPT
+ LayerProperties & operator=( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- arrayLayers = arrayLayers_;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::LayerProperties const *>( &rhs );
return *this;
}
- ImageCreateInfo & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
+ LayerProperties & operator=( LayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- samples = samples_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( LayerProperties ) );
return *this;
}
- ImageCreateInfo & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT
+
+ operator VkLayerProperties const&() const VULKAN_HPP_NOEXCEPT
{
- tiling = tiling_;
- return *this;
+ return *reinterpret_cast<const VkLayerProperties*>( this );
}
- ImageCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
+ operator VkLayerProperties &() VULKAN_HPP_NOEXCEPT
{
- usage = usage_;
- return *this;
+ return *reinterpret_cast<VkLayerProperties*>( this );
}
- ImageCreateInfo & setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( LayerProperties const& ) const = default;
+#else
+ bool operator==( LayerProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- sharingMode = sharingMode_;
+ return ( layerName == rhs.layerName )
+ && ( specVersion == rhs.specVersion )
+ && ( implementationVersion == rhs.implementationVersion )
+ && ( description == rhs.description );
+ }
+
+ bool operator!=( LayerProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> layerName = {};
+ uint32_t specVersion = {};
+ uint32_t implementationVersion = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
+
+ };
+ static_assert( sizeof( LayerProperties ) == sizeof( VkLayerProperties ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<LayerProperties>::value, "struct wrapper is not a standard layout!" );
+
+ struct PerformanceCounterKHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceCounterKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 PerformanceCounterKHR(VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR unit_ = VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR::eGeneric, VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR scope_ = VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR::eCommandBuffer, VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage_ = VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32, std::array<uint8_t,VK_UUID_SIZE> const& uuid_ = {}) VULKAN_HPP_NOEXCEPT
+ : unit( unit_ ), scope( scope_ ), storage( storage_ ), uuid( uuid_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR_14 PerformanceCounterKHR( PerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PerformanceCounterKHR( VkPerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PerformanceCounterKHR & operator=( VkPerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR const *>( &rhs );
return *this;
}
- ImageCreateInfo & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
+ PerformanceCounterKHR & operator=( PerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- queueFamilyIndexCount = queueFamilyIndexCount_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PerformanceCounterKHR ) );
return *this;
}
- ImageCreateInfo & setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
+
+ operator VkPerformanceCounterKHR const&() const VULKAN_HPP_NOEXCEPT
{
- pQueueFamilyIndices = pQueueFamilyIndices_;
+ return *reinterpret_cast<const VkPerformanceCounterKHR*>( this );
+ }
+
+ operator VkPerformanceCounterKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPerformanceCounterKHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PerformanceCounterKHR const& ) const = default;
+#else
+ bool operator==( PerformanceCounterKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( unit == rhs.unit )
+ && ( scope == rhs.scope )
+ && ( storage == rhs.storage )
+ && ( uuid == rhs.uuid );
+ }
+
+ bool operator!=( PerformanceCounterKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR unit = VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR::eGeneric;
+ VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR scope = VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR::eCommandBuffer;
+ VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage = VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32;
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> uuid = {};
+
+ };
+ static_assert( sizeof( PerformanceCounterKHR ) == sizeof( VkPerformanceCounterKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PerformanceCounterKHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::ePerformanceCounterKHR>
+ {
+ using Type = PerformanceCounterKHR;
+ };
+
+ struct PerformanceCounterDescriptionKHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceCounterDescriptionKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 PerformanceCounterDescriptionKHR(VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const& name_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const& category_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const& description_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), name( name_ ), category( category_ ), description( description_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR_14 PerformanceCounterDescriptionKHR( PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PerformanceCounterDescriptionKHR( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PerformanceCounterDescriptionKHR & operator=( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR const *>( &rhs );
return *this;
}
- ImageCreateInfo & setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT
+ PerformanceCounterDescriptionKHR & operator=( PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- initialLayout = initialLayout_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PerformanceCounterDescriptionKHR ) );
return *this;
}
- operator VkImageCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkPerformanceCounterDescriptionKHR const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkImageCreateInfo*>( this );
+ return *reinterpret_cast<const VkPerformanceCounterDescriptionKHR*>( this );
}
- operator VkImageCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkPerformanceCounterDescriptionKHR &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkImageCreateInfo*>( this );
+ return *reinterpret_cast<VkPerformanceCounterDescriptionKHR*>( this );
}
- bool operator==( ImageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PerformanceCounterDescriptionKHR const& ) const = default;
+#else
+ bool operator==( PerformanceCounterDescriptionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
- && ( imageType == rhs.imageType )
- && ( format == rhs.format )
- && ( extent == rhs.extent )
- && ( mipLevels == rhs.mipLevels )
- && ( arrayLayers == rhs.arrayLayers )
- && ( samples == rhs.samples )
- && ( tiling == rhs.tiling )
- && ( usage == rhs.usage )
- && ( sharingMode == rhs.sharingMode )
- && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
- && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices )
- && ( initialLayout == rhs.initialLayout );
+ && ( name == rhs.name )
+ && ( category == rhs.category )
+ && ( description == rhs.description );
}
- bool operator!=( ImageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PerformanceCounterDescriptionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCreateInfo;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterDescriptionKHR;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {};
- VULKAN_HPP_NAMESPACE::ImageType imageType = VULKAN_HPP_NAMESPACE::ImageType::e1D;
- VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
- VULKAN_HPP_NAMESPACE::Extent3D extent = {};
- uint32_t mipLevels = {};
- uint32_t arrayLayers = {};
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
- VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal;
- VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
- VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
- uint32_t queueFamilyIndexCount = {};
- const uint32_t* pQueueFamilyIndices = {};
- VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+ VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> name = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> category = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
+
};
- static_assert( sizeof( ImageCreateInfo ) == sizeof( VkImageCreateInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ImageCreateInfo>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( PerformanceCounterDescriptionKHR ) == sizeof( VkPerformanceCounterDescriptionKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PerformanceCounterDescriptionKHR>::value, "struct wrapper is not a standard layout!" );
- struct SubresourceLayout
+ template <>
+ struct CppType<StructureType, StructureType::ePerformanceCounterDescriptionKHR>
{
- SubresourceLayout( VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize size_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize rowPitch_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize depthPitch_ = {} ) VULKAN_HPP_NOEXCEPT
- : offset( offset_ )
- , size( size_ )
- , rowPitch( rowPitch_ )
- , arrayPitch( arrayPitch_ )
- , depthPitch( depthPitch_ )
+ using Type = PerformanceCounterDescriptionKHR;
+ };
+
+ struct DisplayModePropertiesKHR
+ {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DisplayModePropertiesKHR(VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ = {}, VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = {}) VULKAN_HPP_NOEXCEPT
+ : displayMode( displayMode_ ), parameters( parameters_ )
{}
- SubresourceLayout( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR DisplayModePropertiesKHR( DisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DisplayModePropertiesKHR( VkDisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- SubresourceLayout& operator=( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT
+ DisplayModePropertiesKHR & operator=( VkDisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubresourceLayout const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR const *>( &rhs );
return *this;
}
- operator VkSubresourceLayout const&() const VULKAN_HPP_NOEXCEPT
+ DisplayModePropertiesKHR & operator=( DisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkSubresourceLayout*>( this );
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DisplayModePropertiesKHR ) );
+ return *this;
}
- operator VkSubresourceLayout &() VULKAN_HPP_NOEXCEPT
+
+ operator VkDisplayModePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkSubresourceLayout*>( this );
+ return *reinterpret_cast<const VkDisplayModePropertiesKHR*>( this );
}
- bool operator==( SubresourceLayout const& rhs ) const VULKAN_HPP_NOEXCEPT
+ operator VkDisplayModePropertiesKHR &() VULKAN_HPP_NOEXCEPT
{
- return ( offset == rhs.offset )
- && ( size == rhs.size )
- && ( rowPitch == rhs.rowPitch )
- && ( arrayPitch == rhs.arrayPitch )
- && ( depthPitch == rhs.depthPitch );
+ return *reinterpret_cast<VkDisplayModePropertiesKHR*>( this );
}
- bool operator!=( SubresourceLayout const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DisplayModePropertiesKHR const& ) const = default;
+#else
+ bool operator==( DisplayModePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( displayMode == rhs.displayMode )
+ && ( parameters == rhs.parameters );
+ }
+
+ bool operator!=( DisplayModePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
- VULKAN_HPP_NAMESPACE::DeviceSize size = {};
- VULKAN_HPP_NAMESPACE::DeviceSize rowPitch = {};
- VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch = {};
- VULKAN_HPP_NAMESPACE::DeviceSize depthPitch = {};
+ VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode = {};
+ VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters = {};
+
};
- static_assert( sizeof( SubresourceLayout ) == sizeof( VkSubresourceLayout ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<SubresourceLayout>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( DisplayModePropertiesKHR ) == sizeof( VkDisplayModePropertiesKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<DisplayModePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
- struct ImageDrmFormatModifierExplicitCreateInfoEXT
+ struct DisplayModeProperties2KHR
{
- VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT( uint64_t drmFormatModifier_ = {},
- uint32_t drmFormatModifierPlaneCount_ = {},
- const VULKAN_HPP_NAMESPACE::SubresourceLayout* pPlaneLayouts_ = {} ) VULKAN_HPP_NOEXCEPT
- : drmFormatModifier( drmFormatModifier_ )
- , drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ )
- , pPlaneLayouts( pPlaneLayouts_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayModeProperties2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DisplayModeProperties2KHR(VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties_ = {}) VULKAN_HPP_NOEXCEPT
+ : displayModeProperties( displayModeProperties_ )
{}
- VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR DisplayModeProperties2KHR( DisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DisplayModeProperties2KHR( VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ DisplayModeProperties2KHR & operator=( VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT ) - offsetof( ImageDrmFormatModifierExplicitCreateInfoEXT, pNext ) );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR const *>( &rhs );
return *this;
}
- ImageDrmFormatModifierExplicitCreateInfoEXT( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ DisplayModeProperties2KHR & operator=( DisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DisplayModeProperties2KHR ) );
+ return *this;
+ }
+
+
+ operator VkDisplayModeProperties2KHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkDisplayModeProperties2KHR*>( this );
+ }
+
+ operator VkDisplayModeProperties2KHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkDisplayModeProperties2KHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DisplayModeProperties2KHR const& ) const = default;
+#else
+ bool operator==( DisplayModeProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( displayModeProperties == rhs.displayModeProperties );
+ }
+
+ bool operator!=( DisplayModeProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayModeProperties2KHR;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties = {};
+
+ };
+ static_assert( sizeof( DisplayModeProperties2KHR ) == sizeof( VkDisplayModeProperties2KHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<DisplayModeProperties2KHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eDisplayModeProperties2KHR>
+ {
+ using Type = DisplayModeProperties2KHR;
+ };
+
+ struct DisplayPlaneInfo2KHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneInfo2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DisplayPlaneInfo2KHR(VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_ = {}, uint32_t planeIndex_ = {}) VULKAN_HPP_NOEXCEPT
+ : mode( mode_ ), planeIndex( planeIndex_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR DisplayPlaneInfo2KHR( DisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DisplayPlaneInfo2KHR( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ImageDrmFormatModifierExplicitCreateInfoEXT& operator=( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ DisplayPlaneInfo2KHR & operator=( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR const *>( &rhs );
return *this;
}
- ImageDrmFormatModifierExplicitCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ DisplayPlaneInfo2KHR & operator=( DisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DisplayPlaneInfo2KHR ) );
return *this;
}
- ImageDrmFormatModifierExplicitCreateInfoEXT & setDrmFormatModifier( uint64_t drmFormatModifier_ ) VULKAN_HPP_NOEXCEPT
+ DisplayPlaneInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- drmFormatModifier = drmFormatModifier_;
+ pNext = pNext_;
return *this;
}
- ImageDrmFormatModifierExplicitCreateInfoEXT & setDrmFormatModifierPlaneCount( uint32_t drmFormatModifierPlaneCount_ ) VULKAN_HPP_NOEXCEPT
+ DisplayPlaneInfo2KHR & setMode( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_ ) VULKAN_HPP_NOEXCEPT
{
- drmFormatModifierPlaneCount = drmFormatModifierPlaneCount_;
+ mode = mode_;
return *this;
}
- ImageDrmFormatModifierExplicitCreateInfoEXT & setPPlaneLayouts( const VULKAN_HPP_NAMESPACE::SubresourceLayout* pPlaneLayouts_ ) VULKAN_HPP_NOEXCEPT
+ DisplayPlaneInfo2KHR & setPlaneIndex( uint32_t planeIndex_ ) VULKAN_HPP_NOEXCEPT
{
- pPlaneLayouts = pPlaneLayouts_;
+ planeIndex = planeIndex_;
return *this;
}
- operator VkImageDrmFormatModifierExplicitCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkDisplayPlaneInfo2KHR const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkImageDrmFormatModifierExplicitCreateInfoEXT*>( this );
+ return *reinterpret_cast<const VkDisplayPlaneInfo2KHR*>( this );
}
- operator VkImageDrmFormatModifierExplicitCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkDisplayPlaneInfo2KHR &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkImageDrmFormatModifierExplicitCreateInfoEXT*>( this );
+ return *reinterpret_cast<VkDisplayPlaneInfo2KHR*>( this );
}
- bool operator==( ImageDrmFormatModifierExplicitCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DisplayPlaneInfo2KHR const& ) const = default;
+#else
+ bool operator==( DisplayPlaneInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( drmFormatModifier == rhs.drmFormatModifier )
- && ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount )
- && ( pPlaneLayouts == rhs.pPlaneLayouts );
+ && ( mode == rhs.mode )
+ && ( planeIndex == rhs.planeIndex );
}
- bool operator!=( ImageDrmFormatModifierExplicitCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( DisplayPlaneInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneInfo2KHR;
const void* pNext = {};
- uint64_t drmFormatModifier = {};
- uint32_t drmFormatModifierPlaneCount = {};
- const VULKAN_HPP_NAMESPACE::SubresourceLayout* pPlaneLayouts = {};
+ VULKAN_HPP_NAMESPACE::DisplayModeKHR mode = {};
+ uint32_t planeIndex = {};
+
};
- static_assert( sizeof( ImageDrmFormatModifierExplicitCreateInfoEXT ) == sizeof( VkImageDrmFormatModifierExplicitCreateInfoEXT ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ImageDrmFormatModifierExplicitCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( DisplayPlaneInfo2KHR ) == sizeof( VkDisplayPlaneInfo2KHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<DisplayPlaneInfo2KHR>::value, "struct wrapper is not a standard layout!" );
- struct ImageDrmFormatModifierListCreateInfoEXT
+ template <>
+ struct CppType<StructureType, StructureType::eDisplayPlaneInfo2KHR>
{
- VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierListCreateInfoEXT( uint32_t drmFormatModifierCount_ = {},
- const uint64_t* pDrmFormatModifiers_ = {} ) VULKAN_HPP_NOEXCEPT
- : drmFormatModifierCount( drmFormatModifierCount_ )
- , pDrmFormatModifiers( pDrmFormatModifiers_ )
+ using Type = DisplayPlaneInfo2KHR;
+ };
+
+ struct DisplayPlaneCapabilitiesKHR
+ {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilitiesKHR(VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR supportedAlpha_ = {}, VULKAN_HPP_NAMESPACE::Offset2D minSrcPosition_ = {}, VULKAN_HPP_NAMESPACE::Offset2D maxSrcPosition_ = {}, VULKAN_HPP_NAMESPACE::Extent2D minSrcExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxSrcExtent_ = {}, VULKAN_HPP_NAMESPACE::Offset2D minDstPosition_ = {}, VULKAN_HPP_NAMESPACE::Offset2D maxDstPosition_ = {}, VULKAN_HPP_NAMESPACE::Extent2D minDstExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxDstExtent_ = {}) VULKAN_HPP_NOEXCEPT
+ : supportedAlpha( supportedAlpha_ ), minSrcPosition( minSrcPosition_ ), maxSrcPosition( maxSrcPosition_ ), minSrcExtent( minSrcExtent_ ), maxSrcExtent( maxSrcExtent_ ), minDstPosition( minDstPosition_ ), maxDstPosition( maxDstPosition_ ), minDstExtent( minDstExtent_ ), maxDstExtent( maxDstExtent_ )
{}
- VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilitiesKHR( DisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DisplayPlaneCapabilitiesKHR( VkDisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT ) - offsetof( ImageDrmFormatModifierListCreateInfoEXT, pNext ) );
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ DisplayPlaneCapabilitiesKHR & operator=( VkDisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR const *>( &rhs );
return *this;
}
- ImageDrmFormatModifierListCreateInfoEXT( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ DisplayPlaneCapabilitiesKHR & operator=( DisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DisplayPlaneCapabilitiesKHR ) );
+ return *this;
+ }
+
+
+ operator VkDisplayPlaneCapabilitiesKHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkDisplayPlaneCapabilitiesKHR*>( this );
+ }
+
+ operator VkDisplayPlaneCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkDisplayPlaneCapabilitiesKHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DisplayPlaneCapabilitiesKHR const& ) const = default;
+#else
+ bool operator==( DisplayPlaneCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( supportedAlpha == rhs.supportedAlpha )
+ && ( minSrcPosition == rhs.minSrcPosition )
+ && ( maxSrcPosition == rhs.maxSrcPosition )
+ && ( minSrcExtent == rhs.minSrcExtent )
+ && ( maxSrcExtent == rhs.maxSrcExtent )
+ && ( minDstPosition == rhs.minDstPosition )
+ && ( maxDstPosition == rhs.maxDstPosition )
+ && ( minDstExtent == rhs.minDstExtent )
+ && ( maxDstExtent == rhs.maxDstExtent );
+ }
+
+ bool operator!=( DisplayPlaneCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR supportedAlpha = {};
+ VULKAN_HPP_NAMESPACE::Offset2D minSrcPosition = {};
+ VULKAN_HPP_NAMESPACE::Offset2D maxSrcPosition = {};
+ VULKAN_HPP_NAMESPACE::Extent2D minSrcExtent = {};
+ VULKAN_HPP_NAMESPACE::Extent2D maxSrcExtent = {};
+ VULKAN_HPP_NAMESPACE::Offset2D minDstPosition = {};
+ VULKAN_HPP_NAMESPACE::Offset2D maxDstPosition = {};
+ VULKAN_HPP_NAMESPACE::Extent2D minDstExtent = {};
+ VULKAN_HPP_NAMESPACE::Extent2D maxDstExtent = {};
+
+ };
+ static_assert( sizeof( DisplayPlaneCapabilitiesKHR ) == sizeof( VkDisplayPlaneCapabilitiesKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<DisplayPlaneCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+
+ struct DisplayPlaneCapabilities2KHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneCapabilities2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilities2KHR(VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities_ = {}) VULKAN_HPP_NOEXCEPT
+ : capabilities( capabilities_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilities2KHR( DisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DisplayPlaneCapabilities2KHR( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ImageDrmFormatModifierListCreateInfoEXT& operator=( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ DisplayPlaneCapabilities2KHR & operator=( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR const *>( &rhs );
return *this;
}
- ImageDrmFormatModifierListCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ DisplayPlaneCapabilities2KHR & operator=( DisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DisplayPlaneCapabilities2KHR ) );
return *this;
}
- ImageDrmFormatModifierListCreateInfoEXT & setDrmFormatModifierCount( uint32_t drmFormatModifierCount_ ) VULKAN_HPP_NOEXCEPT
+
+ operator VkDisplayPlaneCapabilities2KHR const&() const VULKAN_HPP_NOEXCEPT
{
- drmFormatModifierCount = drmFormatModifierCount_;
+ return *reinterpret_cast<const VkDisplayPlaneCapabilities2KHR*>( this );
+ }
+
+ operator VkDisplayPlaneCapabilities2KHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkDisplayPlaneCapabilities2KHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DisplayPlaneCapabilities2KHR const& ) const = default;
+#else
+ bool operator==( DisplayPlaneCapabilities2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( capabilities == rhs.capabilities );
+ }
+
+ bool operator!=( DisplayPlaneCapabilities2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneCapabilities2KHR;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities = {};
+
+ };
+ static_assert( sizeof( DisplayPlaneCapabilities2KHR ) == sizeof( VkDisplayPlaneCapabilities2KHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<DisplayPlaneCapabilities2KHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eDisplayPlaneCapabilities2KHR>
+ {
+ using Type = DisplayPlaneCapabilities2KHR;
+ };
+
+ struct DisplayPlanePropertiesKHR
+ {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DisplayPlanePropertiesKHR(VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay_ = {}, uint32_t currentStackIndex_ = {}) VULKAN_HPP_NOEXCEPT
+ : currentDisplay( currentDisplay_ ), currentStackIndex( currentStackIndex_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR DisplayPlanePropertiesKHR( DisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DisplayPlanePropertiesKHR( VkDisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ DisplayPlanePropertiesKHR & operator=( VkDisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR const *>( &rhs );
return *this;
}
- ImageDrmFormatModifierListCreateInfoEXT & setPDrmFormatModifiers( const uint64_t* pDrmFormatModifiers_ ) VULKAN_HPP_NOEXCEPT
+ DisplayPlanePropertiesKHR & operator=( DisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- pDrmFormatModifiers = pDrmFormatModifiers_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DisplayPlanePropertiesKHR ) );
return *this;
}
- operator VkImageDrmFormatModifierListCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkDisplayPlanePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkImageDrmFormatModifierListCreateInfoEXT*>( this );
+ return *reinterpret_cast<const VkDisplayPlanePropertiesKHR*>( this );
}
- operator VkImageDrmFormatModifierListCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkDisplayPlanePropertiesKHR &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkImageDrmFormatModifierListCreateInfoEXT*>( this );
+ return *reinterpret_cast<VkDisplayPlanePropertiesKHR*>( this );
}
- bool operator==( ImageDrmFormatModifierListCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DisplayPlanePropertiesKHR const& ) const = default;
+#else
+ bool operator==( DisplayPlanePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( currentDisplay == rhs.currentDisplay )
+ && ( currentStackIndex == rhs.currentStackIndex );
+ }
+
+ bool operator!=( DisplayPlanePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay = {};
+ uint32_t currentStackIndex = {};
+
+ };
+ static_assert( sizeof( DisplayPlanePropertiesKHR ) == sizeof( VkDisplayPlanePropertiesKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<DisplayPlanePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+
+ struct DisplayPlaneProperties2KHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneProperties2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DisplayPlaneProperties2KHR(VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties_ = {}) VULKAN_HPP_NOEXCEPT
+ : displayPlaneProperties( displayPlaneProperties_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR DisplayPlaneProperties2KHR( DisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DisplayPlaneProperties2KHR( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ DisplayPlaneProperties2KHR & operator=( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR const *>( &rhs );
+ return *this;
+ }
+
+ DisplayPlaneProperties2KHR & operator=( DisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DisplayPlaneProperties2KHR ) );
+ return *this;
+ }
+
+
+ operator VkDisplayPlaneProperties2KHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkDisplayPlaneProperties2KHR*>( this );
+ }
+
+ operator VkDisplayPlaneProperties2KHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkDisplayPlaneProperties2KHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DisplayPlaneProperties2KHR const& ) const = default;
+#else
+ bool operator==( DisplayPlaneProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( drmFormatModifierCount == rhs.drmFormatModifierCount )
- && ( pDrmFormatModifiers == rhs.pDrmFormatModifiers );
+ && ( displayPlaneProperties == rhs.displayPlaneProperties );
}
- bool operator!=( ImageDrmFormatModifierListCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( DisplayPlaneProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierListCreateInfoEXT;
- const void* pNext = {};
- uint32_t drmFormatModifierCount = {};
- const uint64_t* pDrmFormatModifiers = {};
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneProperties2KHR;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties = {};
+
};
- static_assert( sizeof( ImageDrmFormatModifierListCreateInfoEXT ) == sizeof( VkImageDrmFormatModifierListCreateInfoEXT ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ImageDrmFormatModifierListCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( DisplayPlaneProperties2KHR ) == sizeof( VkDisplayPlaneProperties2KHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<DisplayPlaneProperties2KHR>::value, "struct wrapper is not a standard layout!" );
- struct ImageDrmFormatModifierPropertiesEXT
+ template <>
+ struct CppType<StructureType, StructureType::eDisplayPlaneProperties2KHR>
+ {
+ using Type = DisplayPlaneProperties2KHR;
+ };
+
+ struct DisplayPropertiesKHR
{
- ImageDrmFormatModifierPropertiesEXT( uint64_t drmFormatModifier_ = {} ) VULKAN_HPP_NOEXCEPT
- : drmFormatModifier( drmFormatModifier_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DisplayPropertiesKHR(VULKAN_HPP_NAMESPACE::DisplayKHR display_ = {}, const char* displayName_ = {}, VULKAN_HPP_NAMESPACE::Extent2D physicalDimensions_ = {}, VULKAN_HPP_NAMESPACE::Extent2D physicalResolution_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, VULKAN_HPP_NAMESPACE::Bool32 planeReorderPossible_ = {}, VULKAN_HPP_NAMESPACE::Bool32 persistentContent_ = {}) VULKAN_HPP_NOEXCEPT
+ : display( display_ ), displayName( displayName_ ), physicalDimensions( physicalDimensions_ ), physicalResolution( physicalResolution_ ), supportedTransforms( supportedTransforms_ ), planeReorderPossible( planeReorderPossible_ ), persistentContent( persistentContent_ )
{}
- VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR DisplayPropertiesKHR( DisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DisplayPropertiesKHR( VkDisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ DisplayPropertiesKHR & operator=( VkDisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT ) - offsetof( ImageDrmFormatModifierPropertiesEXT, pNext ) );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR const *>( &rhs );
return *this;
}
- ImageDrmFormatModifierPropertiesEXT( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ DisplayPropertiesKHR & operator=( DisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DisplayPropertiesKHR ) );
+ return *this;
+ }
+
+
+ operator VkDisplayPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkDisplayPropertiesKHR*>( this );
+ }
+
+ operator VkDisplayPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkDisplayPropertiesKHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DisplayPropertiesKHR const& ) const = default;
+#else
+ bool operator==( DisplayPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( display == rhs.display )
+ && ( displayName == rhs.displayName )
+ && ( physicalDimensions == rhs.physicalDimensions )
+ && ( physicalResolution == rhs.physicalResolution )
+ && ( supportedTransforms == rhs.supportedTransforms )
+ && ( planeReorderPossible == rhs.planeReorderPossible )
+ && ( persistentContent == rhs.persistentContent );
+ }
+
+ bool operator!=( DisplayPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::DisplayKHR display = {};
+ const char* displayName = {};
+ VULKAN_HPP_NAMESPACE::Extent2D physicalDimensions = {};
+ VULKAN_HPP_NAMESPACE::Extent2D physicalResolution = {};
+ VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {};
+ VULKAN_HPP_NAMESPACE::Bool32 planeReorderPossible = {};
+ VULKAN_HPP_NAMESPACE::Bool32 persistentContent = {};
+
+ };
+ static_assert( sizeof( DisplayPropertiesKHR ) == sizeof( VkDisplayPropertiesKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<DisplayPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+
+ struct DisplayProperties2KHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayProperties2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DisplayProperties2KHR(VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties_ = {}) VULKAN_HPP_NOEXCEPT
+ : displayProperties( displayProperties_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR DisplayProperties2KHR( DisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DisplayProperties2KHR( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ImageDrmFormatModifierPropertiesEXT& operator=( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ DisplayProperties2KHR & operator=( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR const *>( &rhs );
return *this;
}
- operator VkImageDrmFormatModifierPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+ DisplayProperties2KHR & operator=( DisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkImageDrmFormatModifierPropertiesEXT*>( this );
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DisplayProperties2KHR ) );
+ return *this;
}
- operator VkImageDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+
+ operator VkDisplayProperties2KHR const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT*>( this );
+ return *reinterpret_cast<const VkDisplayProperties2KHR*>( this );
}
- bool operator==( ImageDrmFormatModifierPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ operator VkDisplayProperties2KHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkDisplayProperties2KHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DisplayProperties2KHR const& ) const = default;
+#else
+ bool operator==( DisplayProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( drmFormatModifier == rhs.drmFormatModifier );
+ && ( displayProperties == rhs.displayProperties );
}
- bool operator!=( ImageDrmFormatModifierPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( DisplayProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierPropertiesEXT;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayProperties2KHR;
void* pNext = {};
- uint64_t drmFormatModifier = {};
+ VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties = {};
+
};
- static_assert( sizeof( ImageDrmFormatModifierPropertiesEXT ) == sizeof( VkImageDrmFormatModifierPropertiesEXT ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ImageDrmFormatModifierPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( DisplayProperties2KHR ) == sizeof( VkDisplayProperties2KHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<DisplayProperties2KHR>::value, "struct wrapper is not a standard layout!" );
- struct ImageFormatListCreateInfo
+ template <>
+ struct CppType<StructureType, StructureType::eDisplayProperties2KHR>
{
- VULKAN_HPP_CONSTEXPR ImageFormatListCreateInfo( uint32_t viewFormatCount_ = {},
- const VULKAN_HPP_NAMESPACE::Format* pViewFormats_ = {} ) VULKAN_HPP_NOEXCEPT
- : viewFormatCount( viewFormatCount_ )
- , pViewFormats( pViewFormats_ )
+ using Type = DisplayProperties2KHR;
+ };
+
+ struct PhysicalDeviceExternalBufferInfo
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalBufferInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalBufferInfo(VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), usage( usage_ ), handleType( handleType_ )
{}
- VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo & operator=( VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalBufferInfo( PhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceExternalBufferInfo( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo ) - offsetof( ImageFormatListCreateInfo, pNext ) );
- return *this;
+ *this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ImageFormatListCreateInfo( VkImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceExternalBufferInfo & operator=( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo const *>( &rhs );
+ return *this;
}
- ImageFormatListCreateInfo& operator=( VkImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceExternalBufferInfo & operator=( PhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceExternalBufferInfo ) );
return *this;
}
- ImageFormatListCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceExternalBufferInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- ImageFormatListCreateInfo & setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceExternalBufferInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
- viewFormatCount = viewFormatCount_;
+ flags = flags_;
return *this;
}
- ImageFormatListCreateInfo & setPViewFormats( const VULKAN_HPP_NAMESPACE::Format* pViewFormats_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceExternalBufferInfo & setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
{
- pViewFormats = pViewFormats_;
+ usage = usage_;
return *this;
}
- operator VkImageFormatListCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceExternalBufferInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkImageFormatListCreateInfo*>( this );
+ handleType = handleType_;
+ return *this;
}
- operator VkImageFormatListCreateInfo &() VULKAN_HPP_NOEXCEPT
+
+ operator VkPhysicalDeviceExternalBufferInfo const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkImageFormatListCreateInfo*>( this );
+ return *reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo*>( this );
}
- bool operator==( ImageFormatListCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceExternalBufferInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceExternalBufferInfo*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceExternalBufferInfo const& ) const = default;
+#else
+ bool operator==( PhysicalDeviceExternalBufferInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( viewFormatCount == rhs.viewFormatCount )
- && ( pViewFormats == rhs.pViewFormats );
+ && ( flags == rhs.flags )
+ && ( usage == rhs.usage )
+ && ( handleType == rhs.handleType );
}
- bool operator!=( ImageFormatListCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PhysicalDeviceExternalBufferInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatListCreateInfo;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalBufferInfo;
const void* pNext = {};
- uint32_t viewFormatCount = {};
- const VULKAN_HPP_NAMESPACE::Format* pViewFormats = {};
+ VULKAN_HPP_NAMESPACE::BufferCreateFlags flags = {};
+ VULKAN_HPP_NAMESPACE::BufferUsageFlags usage = {};
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
+
};
- static_assert( sizeof( ImageFormatListCreateInfo ) == sizeof( VkImageFormatListCreateInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ImageFormatListCreateInfo>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( PhysicalDeviceExternalBufferInfo ) == sizeof( VkPhysicalDeviceExternalBufferInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceExternalBufferInfo>::value, "struct wrapper is not a standard layout!" );
- struct ImageFormatProperties2
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceExternalBufferInfo>
+ {
+ using Type = PhysicalDeviceExternalBufferInfo;
+ };
+ using PhysicalDeviceExternalBufferInfoKHR = PhysicalDeviceExternalBufferInfo;
+
+ struct ExternalMemoryProperties
{
- ImageFormatProperties2( VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = {} ) VULKAN_HPP_NOEXCEPT
- : imageFormatProperties( imageFormatProperties_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ExternalMemoryProperties(VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags externalMemoryFeatures_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes_ = {}) VULKAN_HPP_NOEXCEPT
+ : externalMemoryFeatures( externalMemoryFeatures_ ), exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ), compatibleHandleTypes( compatibleHandleTypes_ )
{}
- VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & operator=( VULKAN_HPP_NAMESPACE::ImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ExternalMemoryProperties( ExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ExternalMemoryProperties( VkExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ ExternalMemoryProperties & operator=( VkExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageFormatProperties2 ) - offsetof( ImageFormatProperties2, pNext ) );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryProperties const *>( &rhs );
return *this;
}
- ImageFormatProperties2( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ ExternalMemoryProperties & operator=( ExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ExternalMemoryProperties ) );
+ return *this;
+ }
+
+
+ operator VkExternalMemoryProperties const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkExternalMemoryProperties*>( this );
+ }
+
+ operator VkExternalMemoryProperties &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkExternalMemoryProperties*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ExternalMemoryProperties const& ) const = default;
+#else
+ bool operator==( ExternalMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( externalMemoryFeatures == rhs.externalMemoryFeatures )
+ && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
+ && ( compatibleHandleTypes == rhs.compatibleHandleTypes );
+ }
+
+ bool operator!=( ExternalMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags externalMemoryFeatures = {};
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes = {};
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes = {};
+
+ };
+ static_assert( sizeof( ExternalMemoryProperties ) == sizeof( VkExternalMemoryProperties ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ExternalMemoryProperties>::value, "struct wrapper is not a standard layout!" );
+ using ExternalMemoryPropertiesKHR = ExternalMemoryProperties;
+
+ struct ExternalBufferProperties
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalBufferProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ExternalBufferProperties(VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {}) VULKAN_HPP_NOEXCEPT
+ : externalMemoryProperties( externalMemoryProperties_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR ExternalBufferProperties( ExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ExternalBufferProperties( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ImageFormatProperties2& operator=( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ ExternalBufferProperties & operator=( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageFormatProperties2 const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalBufferProperties const *>( &rhs );
return *this;
}
- operator VkImageFormatProperties2 const&() const VULKAN_HPP_NOEXCEPT
+ ExternalBufferProperties & operator=( ExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkImageFormatProperties2*>( this );
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ExternalBufferProperties ) );
+ return *this;
}
- operator VkImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT
+
+ operator VkExternalBufferProperties const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkImageFormatProperties2*>( this );
+ return *reinterpret_cast<const VkExternalBufferProperties*>( this );
}
- bool operator==( ImageFormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+ operator VkExternalBufferProperties &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkExternalBufferProperties*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ExternalBufferProperties const& ) const = default;
+#else
+ bool operator==( ExternalBufferProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( imageFormatProperties == rhs.imageFormatProperties );
+ && ( externalMemoryProperties == rhs.externalMemoryProperties );
}
- bool operator!=( ImageFormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( ExternalBufferProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatProperties2;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalBufferProperties;
void* pNext = {};
- VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties = {};
+ VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties = {};
+
};
- static_assert( sizeof( ImageFormatProperties2 ) == sizeof( VkImageFormatProperties2 ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ImageFormatProperties2>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( ExternalBufferProperties ) == sizeof( VkExternalBufferProperties ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ExternalBufferProperties>::value, "struct wrapper is not a standard layout!" );
- struct ImageSubresourceRange
+ template <>
+ struct CppType<StructureType, StructureType::eExternalBufferProperties>
{
- VULKAN_HPP_CONSTEXPR ImageSubresourceRange( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {},
- uint32_t baseMipLevel_ = {},
- uint32_t levelCount_ = {},
- uint32_t baseArrayLayer_ = {},
- uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT
- : aspectMask( aspectMask_ )
- , baseMipLevel( baseMipLevel_ )
- , levelCount( levelCount_ )
- , baseArrayLayer( baseArrayLayer_ )
- , layerCount( layerCount_ )
+ using Type = ExternalBufferProperties;
+ };
+ using ExternalBufferPropertiesKHR = ExternalBufferProperties;
+
+ struct PhysicalDeviceExternalFenceInfo
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalFenceInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFenceInfo(VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT
+ : handleType( handleType_ )
{}
- ImageSubresourceRange( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFenceInfo( PhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceExternalFenceInfo( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ImageSubresourceRange& operator=( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceExternalFenceInfo & operator=( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresourceRange const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo const *>( &rhs );
return *this;
}
- ImageSubresourceRange & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceExternalFenceInfo & operator=( PhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- aspectMask = aspectMask_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceExternalFenceInfo ) );
return *this;
}
- ImageSubresourceRange & setBaseMipLevel( uint32_t baseMipLevel_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceExternalFenceInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- baseMipLevel = baseMipLevel_;
+ pNext = pNext_;
return *this;
}
- ImageSubresourceRange & setLevelCount( uint32_t levelCount_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceExternalFenceInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
- levelCount = levelCount_;
+ handleType = handleType_;
return *this;
}
- ImageSubresourceRange & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT
+
+ operator VkPhysicalDeviceExternalFenceInfo const&() const VULKAN_HPP_NOEXCEPT
{
- baseArrayLayer = baseArrayLayer_;
+ return *reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo*>( this );
+ }
+
+ operator VkPhysicalDeviceExternalFenceInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceExternalFenceInfo*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceExternalFenceInfo const& ) const = default;
+#else
+ bool operator==( PhysicalDeviceExternalFenceInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( handleType == rhs.handleType );
+ }
+
+ bool operator!=( PhysicalDeviceExternalFenceInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalFenceInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
+
+ };
+ static_assert( sizeof( PhysicalDeviceExternalFenceInfo ) == sizeof( VkPhysicalDeviceExternalFenceInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceExternalFenceInfo>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceExternalFenceInfo>
+ {
+ using Type = PhysicalDeviceExternalFenceInfo;
+ };
+ using PhysicalDeviceExternalFenceInfoKHR = PhysicalDeviceExternalFenceInfo;
+
+ struct ExternalFenceProperties
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalFenceProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ExternalFenceProperties(VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags externalFenceFeatures_ = {}) VULKAN_HPP_NOEXCEPT
+ : exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ), compatibleHandleTypes( compatibleHandleTypes_ ), externalFenceFeatures( externalFenceFeatures_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR ExternalFenceProperties( ExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ExternalFenceProperties( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ ExternalFenceProperties & operator=( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalFenceProperties const *>( &rhs );
return *this;
}
- ImageSubresourceRange & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
+ ExternalFenceProperties & operator=( ExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layerCount = layerCount_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ExternalFenceProperties ) );
return *this;
}
- operator VkImageSubresourceRange const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkExternalFenceProperties const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkImageSubresourceRange*>( this );
+ return *reinterpret_cast<const VkExternalFenceProperties*>( this );
}
- operator VkImageSubresourceRange &() VULKAN_HPP_NOEXCEPT
+ operator VkExternalFenceProperties &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkImageSubresourceRange*>( this );
+ return *reinterpret_cast<VkExternalFenceProperties*>( this );
}
- bool operator==( ImageSubresourceRange const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ExternalFenceProperties const& ) const = default;
+#else
+ bool operator==( ExternalFenceProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- return ( aspectMask == rhs.aspectMask )
- && ( baseMipLevel == rhs.baseMipLevel )
- && ( levelCount == rhs.levelCount )
- && ( baseArrayLayer == rhs.baseArrayLayer )
- && ( layerCount == rhs.layerCount );
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
+ && ( compatibleHandleTypes == rhs.compatibleHandleTypes )
+ && ( externalFenceFeatures == rhs.externalFenceFeatures );
}
- bool operator!=( ImageSubresourceRange const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( ExternalFenceProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
- uint32_t baseMipLevel = {};
- uint32_t levelCount = {};
- uint32_t baseArrayLayer = {};
- uint32_t layerCount = {};
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalFenceProperties;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes = {};
+ VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes = {};
+ VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags externalFenceFeatures = {};
+
};
- static_assert( sizeof( ImageSubresourceRange ) == sizeof( VkImageSubresourceRange ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ImageSubresourceRange>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( ExternalFenceProperties ) == sizeof( VkExternalFenceProperties ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ExternalFenceProperties>::value, "struct wrapper is not a standard layout!" );
- struct ImageMemoryBarrier
+ template <>
+ struct CppType<StructureType, StructureType::eExternalFenceProperties>
{
- VULKAN_HPP_CONSTEXPR ImageMemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {},
- VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {},
- VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
- VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
- uint32_t srcQueueFamilyIndex_ = {},
- uint32_t dstQueueFamilyIndex_ = {},
- VULKAN_HPP_NAMESPACE::Image image_ = {},
- VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {} ) VULKAN_HPP_NOEXCEPT
- : srcAccessMask( srcAccessMask_ )
- , dstAccessMask( dstAccessMask_ )
- , oldLayout( oldLayout_ )
- , newLayout( newLayout_ )
- , srcQueueFamilyIndex( srcQueueFamilyIndex_ )
- , dstQueueFamilyIndex( dstQueueFamilyIndex_ )
- , image( image_ )
- , subresourceRange( subresourceRange_ )
+ using Type = ExternalFenceProperties;
+ };
+ using ExternalFencePropertiesKHR = ExternalFenceProperties;
+
+ struct ImageFormatProperties
+ {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ImageFormatProperties(VULKAN_HPP_NAMESPACE::Extent3D maxExtent_ = {}, uint32_t maxMipLevels_ = {}, uint32_t maxArrayLayers_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize_ = {}) VULKAN_HPP_NOEXCEPT
+ : maxExtent( maxExtent_ ), maxMipLevels( maxMipLevels_ ), maxArrayLayers( maxArrayLayers_ ), sampleCounts( sampleCounts_ ), maxResourceSize( maxResourceSize_ )
{}
- VULKAN_HPP_NAMESPACE::ImageMemoryBarrier & operator=( VULKAN_HPP_NAMESPACE::ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ImageFormatProperties( ImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImageFormatProperties( VkImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ ImageFormatProperties & operator=( VkImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageMemoryBarrier ) - offsetof( ImageMemoryBarrier, pNext ) );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageFormatProperties const *>( &rhs );
return *this;
}
- ImageMemoryBarrier( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
+ ImageFormatProperties & operator=( ImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageFormatProperties ) );
+ return *this;
+ }
+
+
+ operator VkImageFormatProperties const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkImageFormatProperties*>( this );
+ }
+
+ operator VkImageFormatProperties &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkImageFormatProperties*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ImageFormatProperties const& ) const = default;
+#else
+ bool operator==( ImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( maxExtent == rhs.maxExtent )
+ && ( maxMipLevels == rhs.maxMipLevels )
+ && ( maxArrayLayers == rhs.maxArrayLayers )
+ && ( sampleCounts == rhs.sampleCounts )
+ && ( maxResourceSize == rhs.maxResourceSize );
+ }
+
+ bool operator!=( ImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::Extent3D maxExtent = {};
+ uint32_t maxMipLevels = {};
+ uint32_t maxArrayLayers = {};
+ VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize = {};
+
+ };
+ static_assert( sizeof( ImageFormatProperties ) == sizeof( VkImageFormatProperties ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ImageFormatProperties>::value, "struct wrapper is not a standard layout!" );
+
+ struct ExternalImageFormatPropertiesNV
+ {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ExternalImageFormatPropertiesNV(VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV externalMemoryFeatures_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes_ = {}) VULKAN_HPP_NOEXCEPT
+ : imageFormatProperties( imageFormatProperties_ ), externalMemoryFeatures( externalMemoryFeatures_ ), exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ), compatibleHandleTypes( compatibleHandleTypes_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR ExternalImageFormatPropertiesNV( ExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ExternalImageFormatPropertiesNV( VkExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ImageMemoryBarrier& operator=( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
+ ExternalImageFormatPropertiesNV & operator=( VkExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV const *>( &rhs );
return *this;
}
- ImageMemoryBarrier & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ ExternalImageFormatPropertiesNV & operator=( ExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ExternalImageFormatPropertiesNV ) );
return *this;
}
- ImageMemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
+
+ operator VkExternalImageFormatPropertiesNV const&() const VULKAN_HPP_NOEXCEPT
{
- srcAccessMask = srcAccessMask_;
- return *this;
+ return *reinterpret_cast<const VkExternalImageFormatPropertiesNV*>( this );
}
- ImageMemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
+ operator VkExternalImageFormatPropertiesNV &() VULKAN_HPP_NOEXCEPT
{
- dstAccessMask = dstAccessMask_;
- return *this;
+ return *reinterpret_cast<VkExternalImageFormatPropertiesNV*>( this );
}
- ImageMemoryBarrier & setOldLayout( VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ExternalImageFormatPropertiesNV const& ) const = default;
+#else
+ bool operator==( ExternalImageFormatPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- oldLayout = oldLayout_;
- return *this;
+ return ( imageFormatProperties == rhs.imageFormatProperties )
+ && ( externalMemoryFeatures == rhs.externalMemoryFeatures )
+ && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
+ && ( compatibleHandleTypes == rhs.compatibleHandleTypes );
}
- ImageMemoryBarrier & setNewLayout( VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT
+ bool operator!=( ExternalImageFormatPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- newLayout = newLayout_;
- return *this;
+ return !operator==( rhs );
}
+#endif
- ImageMemoryBarrier & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties = {};
+ VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV externalMemoryFeatures = {};
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes = {};
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes = {};
+
+ };
+ static_assert( sizeof( ExternalImageFormatPropertiesNV ) == sizeof( VkExternalImageFormatPropertiesNV ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ExternalImageFormatPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+
+ struct PhysicalDeviceExternalSemaphoreInfo
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalSemaphoreInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo(VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT
+ : handleType( handleType_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo( PhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceExternalSemaphoreInfo( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- srcQueueFamilyIndex = srcQueueFamilyIndex_;
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceExternalSemaphoreInfo & operator=( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo const *>( &rhs );
return *this;
}
- ImageMemoryBarrier & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceExternalSemaphoreInfo & operator=( PhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- dstQueueFamilyIndex = dstQueueFamilyIndex_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceExternalSemaphoreInfo ) );
return *this;
}
- ImageMemoryBarrier & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceExternalSemaphoreInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- image = image_;
+ pNext = pNext_;
return *this;
}
- ImageMemoryBarrier & setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceExternalSemaphoreInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
- subresourceRange = subresourceRange_;
+ handleType = handleType_;
return *this;
}
- operator VkImageMemoryBarrier const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkPhysicalDeviceExternalSemaphoreInfo const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkImageMemoryBarrier*>( this );
+ return *reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo*>( this );
}
- operator VkImageMemoryBarrier &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceExternalSemaphoreInfo &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkImageMemoryBarrier*>( this );
+ return *reinterpret_cast<VkPhysicalDeviceExternalSemaphoreInfo*>( this );
}
- bool operator==( ImageMemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceExternalSemaphoreInfo const& ) const = default;
+#else
+ bool operator==( PhysicalDeviceExternalSemaphoreInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( srcAccessMask == rhs.srcAccessMask )
- && ( dstAccessMask == rhs.dstAccessMask )
- && ( oldLayout == rhs.oldLayout )
- && ( newLayout == rhs.newLayout )
- && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex )
- && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex )
- && ( image == rhs.image )
- && ( subresourceRange == rhs.subresourceRange );
+ && ( handleType == rhs.handleType );
}
- bool operator!=( ImageMemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PhysicalDeviceExternalSemaphoreInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryBarrier;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalSemaphoreInfo;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
- VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
- VULKAN_HPP_NAMESPACE::ImageLayout oldLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
- VULKAN_HPP_NAMESPACE::ImageLayout newLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
- uint32_t srcQueueFamilyIndex = {};
- uint32_t dstQueueFamilyIndex = {};
- VULKAN_HPP_NAMESPACE::Image image = {};
- VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {};
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
+
};
- static_assert( sizeof( ImageMemoryBarrier ) == sizeof( VkImageMemoryBarrier ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ImageMemoryBarrier>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( PhysicalDeviceExternalSemaphoreInfo ) == sizeof( VkPhysicalDeviceExternalSemaphoreInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceExternalSemaphoreInfo>::value, "struct wrapper is not a standard layout!" );
- struct ImageMemoryRequirementsInfo2
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceExternalSemaphoreInfo>
{
- VULKAN_HPP_CONSTEXPR ImageMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Image image_ = {} ) VULKAN_HPP_NOEXCEPT
- : image( image_ )
+ using Type = PhysicalDeviceExternalSemaphoreInfo;
+ };
+ using PhysicalDeviceExternalSemaphoreInfoKHR = PhysicalDeviceExternalSemaphoreInfo;
+
+ struct ExternalSemaphoreProperties
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalSemaphoreProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ExternalSemaphoreProperties(VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures_ = {}) VULKAN_HPP_NOEXCEPT
+ : exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ), compatibleHandleTypes( compatibleHandleTypes_ ), externalSemaphoreFeatures( externalSemaphoreFeatures_ )
{}
- VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & operator=( VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ExternalSemaphoreProperties( ExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ExternalSemaphoreProperties( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ ExternalSemaphoreProperties & operator=( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 ) - offsetof( ImageMemoryRequirementsInfo2, pNext ) );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties const *>( &rhs );
return *this;
}
- ImageMemoryRequirementsInfo2( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ ExternalSemaphoreProperties & operator=( ExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ExternalSemaphoreProperties ) );
+ return *this;
+ }
+
+
+ operator VkExternalSemaphoreProperties const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkExternalSemaphoreProperties*>( this );
+ }
+
+ operator VkExternalSemaphoreProperties &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkExternalSemaphoreProperties*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ExternalSemaphoreProperties const& ) const = default;
+#else
+ bool operator==( ExternalSemaphoreProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
+ && ( compatibleHandleTypes == rhs.compatibleHandleTypes )
+ && ( externalSemaphoreFeatures == rhs.externalSemaphoreFeatures );
+ }
+
+ bool operator!=( ExternalSemaphoreProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalSemaphoreProperties;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes = {};
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes = {};
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures = {};
+
+ };
+ static_assert( sizeof( ExternalSemaphoreProperties ) == sizeof( VkExternalSemaphoreProperties ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ExternalSemaphoreProperties>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eExternalSemaphoreProperties>
+ {
+ using Type = ExternalSemaphoreProperties;
+ };
+ using ExternalSemaphorePropertiesKHR = ExternalSemaphoreProperties;
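
A minimal usage sketch for the two wrappers above, assuming a valid VkPhysicalDevice, the core Vulkan 1.1 entry point vkGetPhysicalDeviceExternalSemaphoreProperties, and the default vk namespace; the helper name is illustrative:

  #include <vulkan/vulkan.hpp>

  // Sketch: ask the driver whether opaque-FD semaphore handles are exportable.
  bool supportsOpaqueFdSemaphoreExport( VkPhysicalDevice physicalDevice )
  {
    vk::PhysicalDeviceExternalSemaphoreInfo info;                 // sType is preset by the wrapper
    info.setHandleType( vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd );

    vk::ExternalSemaphoreProperties properties;                   // output struct, sType preset
    const VkPhysicalDeviceExternalSemaphoreInfo & cInfo = info;   // implicit conversion operators above
    VkExternalSemaphoreProperties & cProperties = properties;
    vkGetPhysicalDeviceExternalSemaphoreProperties( physicalDevice, &cInfo, &cProperties );

    return static_cast<bool>( properties.externalSemaphoreFeatures &
                              vk::ExternalSemaphoreFeatureFlagBits::eExportable );
  }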
+
+ struct PhysicalDeviceFeatures2
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFeatures2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures2(VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features_ = {}) VULKAN_HPP_NOEXCEPT
+ : features( features_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures2( PhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceFeatures2( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ImageMemoryRequirementsInfo2& operator=( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures2 & operator=( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 const *>( &rhs );
return *this;
}
- ImageMemoryRequirementsInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures2 & operator=( PhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceFeatures2 ) );
+ return *this;
+ }
+
+ PhysicalDeviceFeatures2 & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- ImageMemoryRequirementsInfo2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures2 & setFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures const & features_ ) VULKAN_HPP_NOEXCEPT
{
- image = image_;
+ features = features_;
return *this;
}
- operator VkImageMemoryRequirementsInfo2 const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkPhysicalDeviceFeatures2 const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkImageMemoryRequirementsInfo2*>( this );
+ return *reinterpret_cast<const VkPhysicalDeviceFeatures2*>( this );
}
- operator VkImageMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceFeatures2 &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkImageMemoryRequirementsInfo2*>( this );
+ return *reinterpret_cast<VkPhysicalDeviceFeatures2*>( this );
}
- bool operator==( ImageMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceFeatures2 const& ) const = default;
+#else
+ bool operator==( PhysicalDeviceFeatures2 const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( image == rhs.image );
+ && ( features == rhs.features );
}
- bool operator!=( ImageMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PhysicalDeviceFeatures2 const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryRequirementsInfo2;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::Image image = {};
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFeatures2;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features = {};
+
};
- static_assert( sizeof( ImageMemoryRequirementsInfo2 ) == sizeof( VkImageMemoryRequirementsInfo2 ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ImageMemoryRequirementsInfo2>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( PhysicalDeviceFeatures2 ) == sizeof( VkPhysicalDeviceFeatures2 ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceFeatures2>::value, "struct wrapper is not a standard layout!" );
-#ifdef VK_USE_PLATFORM_FUCHSIA
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceFeatures2>
+ {
+ using Type = PhysicalDeviceFeatures2;
+ };
+ using PhysicalDeviceFeatures2KHR = PhysicalDeviceFeatures2;
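
A minimal sketch of how the PhysicalDeviceFeatures2 wrapper above can be filled through its implicit conversion to VkPhysicalDeviceFeatures2, assuming a valid VkPhysicalDevice and Vulkan 1.1; the helper name is illustrative:

  #include <vulkan/vulkan.hpp>

  // Sketch: query the core feature set and test a single feature bit.
  bool deviceSupportsSamplerAnisotropy( VkPhysicalDevice physicalDevice )
  {
    vk::PhysicalDeviceFeatures2 features2;                 // sType preset, pNext = nullptr
    VkPhysicalDeviceFeatures2 & cFeatures2 = features2;    // non-const conversion operator above
    vkGetPhysicalDeviceFeatures2( physicalDevice, &cFeatures2 );
    return features2.features.samplerAnisotropy == VK_TRUE;
  }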
- struct ImagePipeSurfaceCreateInfoFUCHSIA
+ struct FormatProperties
{
- VULKAN_HPP_CONSTEXPR ImagePipeSurfaceCreateInfoFUCHSIA( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ = {},
- zx_handle_t imagePipeHandle_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , imagePipeHandle( imagePipeHandle_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR FormatProperties(VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures_ = {}) VULKAN_HPP_NOEXCEPT
+ : linearTilingFeatures( linearTilingFeatures_ ), optimalTilingFeatures( optimalTilingFeatures_ ), bufferFeatures( bufferFeatures_ )
{}
- VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & operator=( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA ) - offsetof( ImagePipeSurfaceCreateInfoFUCHSIA, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR FormatProperties( FormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- ImagePipeSurfaceCreateInfoFUCHSIA( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+ FormatProperties( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ImagePipeSurfaceCreateInfoFUCHSIA& operator=( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+ FormatProperties & operator=( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FormatProperties const *>( &rhs );
return *this;
}
- ImagePipeSurfaceCreateInfoFUCHSIA & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ FormatProperties & operator=( FormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( FormatProperties ) );
return *this;
}
- ImagePipeSurfaceCreateInfoFUCHSIA & setFlags( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ ) VULKAN_HPP_NOEXCEPT
+
+ operator VkFormatProperties const&() const VULKAN_HPP_NOEXCEPT
{
- flags = flags_;
+ return *reinterpret_cast<const VkFormatProperties*>( this );
+ }
+
+ operator VkFormatProperties &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkFormatProperties*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( FormatProperties const& ) const = default;
+#else
+ bool operator==( FormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( linearTilingFeatures == rhs.linearTilingFeatures )
+ && ( optimalTilingFeatures == rhs.optimalTilingFeatures )
+ && ( bufferFeatures == rhs.bufferFeatures );
+ }
+
+ bool operator!=( FormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures = {};
+ VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures = {};
+ VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures = {};
+
+ };
+ static_assert( sizeof( FormatProperties ) == sizeof( VkFormatProperties ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<FormatProperties>::value, "struct wrapper is not a standard layout!" );
+
+ struct FormatProperties2
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFormatProperties2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR FormatProperties2(VULKAN_HPP_NAMESPACE::FormatProperties formatProperties_ = {}) VULKAN_HPP_NOEXCEPT
+ : formatProperties( formatProperties_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR FormatProperties2( FormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ FormatProperties2( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ FormatProperties2 & operator=( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FormatProperties2 const *>( &rhs );
return *this;
}
- ImagePipeSurfaceCreateInfoFUCHSIA & setImagePipeHandle( zx_handle_t imagePipeHandle_ ) VULKAN_HPP_NOEXCEPT
+ FormatProperties2 & operator=( FormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
- imagePipeHandle = imagePipeHandle_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( FormatProperties2 ) );
return *this;
}
- operator VkImagePipeSurfaceCreateInfoFUCHSIA const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkFormatProperties2 const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA*>( this );
+ return *reinterpret_cast<const VkFormatProperties2*>( this );
}
- operator VkImagePipeSurfaceCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
+ operator VkFormatProperties2 &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkImagePipeSurfaceCreateInfoFUCHSIA*>( this );
+ return *reinterpret_cast<VkFormatProperties2*>( this );
}
- bool operator==( ImagePipeSurfaceCreateInfoFUCHSIA const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( FormatProperties2 const& ) const = default;
+#else
+ bool operator==( FormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( flags == rhs.flags )
- && ( imagePipeHandle == rhs.imagePipeHandle );
+ && ( formatProperties == rhs.formatProperties );
}
- bool operator!=( ImagePipeSurfaceCreateInfoFUCHSIA const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( FormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImagepipeSurfaceCreateInfoFUCHSIA;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags = {};
- zx_handle_t imagePipeHandle = {};
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFormatProperties2;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::FormatProperties formatProperties = {};
+
};
- static_assert( sizeof( ImagePipeSurfaceCreateInfoFUCHSIA ) == sizeof( VkImagePipeSurfaceCreateInfoFUCHSIA ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ImagePipeSurfaceCreateInfoFUCHSIA>::value, "struct wrapper is not a standard layout!" );
-#endif /*VK_USE_PLATFORM_FUCHSIA*/
+ static_assert( sizeof( FormatProperties2 ) == sizeof( VkFormatProperties2 ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<FormatProperties2>::value, "struct wrapper is not a standard layout!" );
- struct ImagePlaneMemoryRequirementsInfo
+ template <>
+ struct CppType<StructureType, StructureType::eFormatProperties2>
+ {
+ using Type = FormatProperties2;
+ };
+ using FormatProperties2KHR = FormatProperties2;
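
The same pattern works for FormatProperties2; a minimal sketch assuming a valid VkPhysicalDevice and Vulkan 1.1 (helper name illustrative):

  #include <vulkan/vulkan.hpp>

  // Sketch: check whether a format can be sampled with optimal tiling.
  bool formatIsSampleable( VkPhysicalDevice physicalDevice, VkFormat format )
  {
    vk::FormatProperties2 properties;                      // sType preset by the wrapper
    VkFormatProperties2 & cProperties = properties;
    vkGetPhysicalDeviceFormatProperties2( physicalDevice, format, &cProperties );
    return static_cast<bool>( properties.formatProperties.optimalTilingFeatures &
                              vk::FormatFeatureFlagBits::eSampledImage );
  }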
+
+ struct PhysicalDeviceImageFormatInfo2
{
- VULKAN_HPP_CONSTEXPR ImagePlaneMemoryRequirementsInfo( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor ) VULKAN_HPP_NOEXCEPT
- : planeAspect( planeAspect_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageFormatInfo2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceImageFormatInfo2(VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {}) VULKAN_HPP_NOEXCEPT
+ : format( format_ ), type( type_ ), tiling( tiling_ ), usage( usage_ ), flags( flags_ )
{}
- VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo & operator=( VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceImageFormatInfo2( PhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceImageFormatInfo2( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo ) - offsetof( ImagePlaneMemoryRequirementsInfo, pNext ) );
- return *this;
+ *this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ImagePlaneMemoryRequirementsInfo( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceImageFormatInfo2 & operator=( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 const *>( &rhs );
+ return *this;
}
- ImagePlaneMemoryRequirementsInfo& operator=( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceImageFormatInfo2 & operator=( PhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceImageFormatInfo2 ) );
return *this;
}
- ImagePlaneMemoryRequirementsInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceImageFormatInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- ImagePlaneMemoryRequirementsInfo & setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceImageFormatInfo2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
{
- planeAspect = planeAspect_;
+ format = format_;
return *this;
}
- operator VkImagePlaneMemoryRequirementsInfo const&() const VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceImageFormatInfo2 & setType( VULKAN_HPP_NAMESPACE::ImageType type_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkImagePlaneMemoryRequirementsInfo*>( this );
+ type = type_;
+ return *this;
}
- operator VkImagePlaneMemoryRequirementsInfo &() VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceImageFormatInfo2 & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkImagePlaneMemoryRequirementsInfo*>( this );
+ tiling = tiling_;
+ return *this;
}
- bool operator==( ImagePlaneMemoryRequirementsInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceImageFormatInfo2 & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
+ {
+ usage = usage_;
+ return *this;
+ }
+
+ PhysicalDeviceImageFormatInfo2 & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ {
+ flags = flags_;
+ return *this;
+ }
+
+
+ operator VkPhysicalDeviceImageFormatInfo2 const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( this );
+ }
+
+ operator VkPhysicalDeviceImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceImageFormatInfo2*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceImageFormatInfo2 const& ) const = default;
+#else
+ bool operator==( PhysicalDeviceImageFormatInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( planeAspect == rhs.planeAspect );
+ && ( format == rhs.format )
+ && ( type == rhs.type )
+ && ( tiling == rhs.tiling )
+ && ( usage == rhs.usage )
+ && ( flags == rhs.flags );
}
- bool operator!=( ImagePlaneMemoryRequirementsInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PhysicalDeviceImageFormatInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImagePlaneMemoryRequirementsInfo;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageFormatInfo2;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor;
+ VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ VULKAN_HPP_NAMESPACE::ImageType type = VULKAN_HPP_NAMESPACE::ImageType::e1D;
+ VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal;
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
+ VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {};
+
};
- static_assert( sizeof( ImagePlaneMemoryRequirementsInfo ) == sizeof( VkImagePlaneMemoryRequirementsInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ImagePlaneMemoryRequirementsInfo>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( PhysicalDeviceImageFormatInfo2 ) == sizeof( VkPhysicalDeviceImageFormatInfo2 ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceImageFormatInfo2>::value, "struct wrapper is not a standard layout!" );
- struct ImageResolve
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceImageFormatInfo2>
{
- VULKAN_HPP_CONSTEXPR ImageResolve( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {},
- VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {},
- VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {},
- VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {},
- VULKAN_HPP_NAMESPACE::Extent3D extent_ = {} ) VULKAN_HPP_NOEXCEPT
- : srcSubresource( srcSubresource_ )
- , srcOffset( srcOffset_ )
- , dstSubresource( dstSubresource_ )
- , dstOffset( dstOffset_ )
- , extent( extent_ )
+ using Type = PhysicalDeviceImageFormatInfo2;
+ };
+ using PhysicalDeviceImageFormatInfo2KHR = PhysicalDeviceImageFormatInfo2;
+
+ struct ImageFormatProperties2
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageFormatProperties2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ImageFormatProperties2(VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = {}) VULKAN_HPP_NOEXCEPT
+ : imageFormatProperties( imageFormatProperties_ )
{}
- ImageResolve( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ImageFormatProperties2( ImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImageFormatProperties2( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ImageResolve& operator=( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT
+ ImageFormatProperties2 & operator=( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageResolve const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageFormatProperties2 const *>( &rhs );
return *this;
}
- ImageResolve & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ ) VULKAN_HPP_NOEXCEPT
+ ImageFormatProperties2 & operator=( ImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
- srcSubresource = srcSubresource_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageFormatProperties2 ) );
return *this;
}
- ImageResolve & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ ) VULKAN_HPP_NOEXCEPT
+
+ operator VkImageFormatProperties2 const&() const VULKAN_HPP_NOEXCEPT
{
- srcOffset = srcOffset_;
- return *this;
+ return *reinterpret_cast<const VkImageFormatProperties2*>( this );
}
- ImageResolve & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ ) VULKAN_HPP_NOEXCEPT
+ operator VkImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT
{
- dstSubresource = dstSubresource_;
- return *this;
+ return *reinterpret_cast<VkImageFormatProperties2*>( this );
}
- ImageResolve & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ ) VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ImageFormatProperties2 const& ) const = default;
+#else
+ bool operator==( ImageFormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- dstOffset = dstOffset_;
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( imageFormatProperties == rhs.imageFormatProperties );
+ }
+
+ bool operator!=( ImageFormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatProperties2;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties = {};
+
+ };
+ static_assert( sizeof( ImageFormatProperties2 ) == sizeof( VkImageFormatProperties2 ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ImageFormatProperties2>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eImageFormatProperties2>
+ {
+ using Type = ImageFormatProperties2;
+ };
+ using ImageFormatProperties2KHR = ImageFormatProperties2;
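
A minimal sketch combining the two structs above, assuming a valid VkPhysicalDevice and Vulkan 1.1; it exercises the chained setters and the implicit conversions (helper name illustrative):

  #include <vulkan/vulkan.hpp>

  // Sketch: query the image limits for a 2D sampled image of a given format.
  VkResult querySampledImageLimits( VkPhysicalDevice physicalDevice,
                                    vk::Format format,
                                    vk::ImageFormatProperties2 & properties )
  {
    vk::PhysicalDeviceImageFormatInfo2 info;
    info.setFormat( format )
        .setType( vk::ImageType::e2D )
        .setTiling( vk::ImageTiling::eOptimal )
        .setUsage( vk::ImageUsageFlagBits::eSampled );

    const VkPhysicalDeviceImageFormatInfo2 & cInfo = info;
    VkImageFormatProperties2 & cProperties = properties;
    return vkGetPhysicalDeviceImageFormatProperties2( physicalDevice, &cInfo, &cProperties );
  }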
+
+ struct MemoryType
+ {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR MemoryType(VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags_ = {}, uint32_t heapIndex_ = {}) VULKAN_HPP_NOEXCEPT
+ : propertyFlags( propertyFlags_ ), heapIndex( heapIndex_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR MemoryType( MemoryType const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ MemoryType( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ MemoryType & operator=( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryType const *>( &rhs );
return *this;
}
- ImageResolve & setExtent( VULKAN_HPP_NAMESPACE::Extent3D extent_ ) VULKAN_HPP_NOEXCEPT
+ MemoryType & operator=( MemoryType const & rhs ) VULKAN_HPP_NOEXCEPT
{
- extent = extent_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( MemoryType ) );
return *this;
}
- operator VkImageResolve const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkMemoryType const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkImageResolve*>( this );
+ return *reinterpret_cast<const VkMemoryType*>( this );
}
- operator VkImageResolve &() VULKAN_HPP_NOEXCEPT
+ operator VkMemoryType &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkImageResolve*>( this );
+ return *reinterpret_cast<VkMemoryType*>( this );
}
- bool operator==( ImageResolve const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( MemoryType const& ) const = default;
+#else
+ bool operator==( MemoryType const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- return ( srcSubresource == rhs.srcSubresource )
- && ( srcOffset == rhs.srcOffset )
- && ( dstSubresource == rhs.dstSubresource )
- && ( dstOffset == rhs.dstOffset )
- && ( extent == rhs.extent );
+ return ( propertyFlags == rhs.propertyFlags )
+ && ( heapIndex == rhs.heapIndex );
}
- bool operator!=( ImageResolve const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( MemoryType const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
- VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {};
- VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
- VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {};
- VULKAN_HPP_NAMESPACE::Extent3D extent = {};
+ VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags = {};
+ uint32_t heapIndex = {};
+
};
- static_assert( sizeof( ImageResolve ) == sizeof( VkImageResolve ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ImageResolve>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( MemoryType ) == sizeof( VkMemoryType ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<MemoryType>::value, "struct wrapper is not a standard layout!" );
- struct ImageSparseMemoryRequirementsInfo2
+ struct MemoryHeap
{
- VULKAN_HPP_CONSTEXPR ImageSparseMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Image image_ = {} ) VULKAN_HPP_NOEXCEPT
- : image( image_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR MemoryHeap(VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags_ = {}) VULKAN_HPP_NOEXCEPT
+ : size( size_ ), flags( flags_ )
{}
- VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & operator=( VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR MemoryHeap( MemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ MemoryHeap( VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 ) - offsetof( ImageSparseMemoryRequirementsInfo2, pNext ) );
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ MemoryHeap & operator=( VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryHeap const *>( &rhs );
return *this;
}
- ImageSparseMemoryRequirementsInfo2( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ MemoryHeap & operator=( MemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( MemoryHeap ) );
+ return *this;
+ }
+
+
+ operator VkMemoryHeap const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkMemoryHeap*>( this );
+ }
+
+ operator VkMemoryHeap &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkMemoryHeap*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( MemoryHeap const& ) const = default;
+#else
+ bool operator==( MemoryHeap const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( size == rhs.size )
+ && ( flags == rhs.flags );
+ }
+
+ bool operator!=( MemoryHeap const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+ VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags = {};
+
+ };
+ static_assert( sizeof( MemoryHeap ) == sizeof( VkMemoryHeap ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<MemoryHeap>::value, "struct wrapper is not a standard layout!" );
+
+ struct PhysicalDeviceMemoryProperties
+ {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties(uint32_t memoryTypeCount_ = {}, std::array<VULKAN_HPP_NAMESPACE::MemoryType,VK_MAX_MEMORY_TYPES> const& memoryTypes_ = {}, uint32_t memoryHeapCount_ = {}, std::array<VULKAN_HPP_NAMESPACE::MemoryHeap,VK_MAX_MEMORY_HEAPS> const& memoryHeaps_ = {}) VULKAN_HPP_NOEXCEPT
+ : memoryTypeCount( memoryTypeCount_ ), memoryTypes( memoryTypes_ ), memoryHeapCount( memoryHeapCount_ ), memoryHeaps( memoryHeaps_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties( PhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceMemoryProperties( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ImageSparseMemoryRequirementsInfo2& operator=( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceMemoryProperties & operator=( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties const *>( &rhs );
return *this;
}
- ImageSparseMemoryRequirementsInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceMemoryProperties & operator=( PhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceMemoryProperties ) );
return *this;
}
- ImageSparseMemoryRequirementsInfo2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+
+ operator VkPhysicalDeviceMemoryProperties const&() const VULKAN_HPP_NOEXCEPT
{
- image = image_;
+ return *reinterpret_cast<const VkPhysicalDeviceMemoryProperties*>( this );
+ }
+
+ operator VkPhysicalDeviceMemoryProperties &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceMemoryProperties*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceMemoryProperties const& ) const = default;
+#else
+ bool operator==( PhysicalDeviceMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( memoryTypeCount == rhs.memoryTypeCount )
+ && ( memoryTypes == rhs.memoryTypes )
+ && ( memoryHeapCount == rhs.memoryHeapCount )
+ && ( memoryHeaps == rhs.memoryHeaps );
+ }
+
+ bool operator!=( PhysicalDeviceMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ uint32_t memoryTypeCount = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::MemoryType, VK_MAX_MEMORY_TYPES> memoryTypes = {};
+ uint32_t memoryHeapCount = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::MemoryHeap, VK_MAX_MEMORY_HEAPS> memoryHeaps = {};
+
+ };
+ static_assert( sizeof( PhysicalDeviceMemoryProperties ) == sizeof( VkPhysicalDeviceMemoryProperties ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceMemoryProperties>::value, "struct wrapper is not a standard layout!" );
+
+ struct PhysicalDeviceMemoryProperties2
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryProperties2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties2(VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties_ = {}) VULKAN_HPP_NOEXCEPT
+ : memoryProperties( memoryProperties_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties2( PhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceMemoryProperties2( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceMemoryProperties2 & operator=( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 const *>( &rhs );
return *this;
}
- operator VkImageSparseMemoryRequirementsInfo2 const&() const VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceMemoryProperties2 & operator=( PhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( this );
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceMemoryProperties2 ) );
+ return *this;
}
- operator VkImageSparseMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT
+
+ operator VkPhysicalDeviceMemoryProperties2 const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkImageSparseMemoryRequirementsInfo2*>( this );
+ return *reinterpret_cast<const VkPhysicalDeviceMemoryProperties2*>( this );
}
- bool operator==( ImageSparseMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceMemoryProperties2 &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceMemoryProperties2 const& ) const = default;
+#else
+ bool operator==( PhysicalDeviceMemoryProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( image == rhs.image );
+ && ( memoryProperties == rhs.memoryProperties );
}
- bool operator!=( ImageSparseMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PhysicalDeviceMemoryProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSparseMemoryRequirementsInfo2;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::Image image = {};
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryProperties2;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties = {};
+
};
- static_assert( sizeof( ImageSparseMemoryRequirementsInfo2 ) == sizeof( VkImageSparseMemoryRequirementsInfo2 ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ImageSparseMemoryRequirementsInfo2>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( PhysicalDeviceMemoryProperties2 ) == sizeof( VkPhysicalDeviceMemoryProperties2 ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceMemoryProperties2>::value, "struct wrapper is not a standard layout!" );
- struct ImageStencilUsageCreateInfo
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceMemoryProperties2>
+ {
+ using Type = PhysicalDeviceMemoryProperties2;
+ };
+ using PhysicalDeviceMemoryProperties2KHR = PhysicalDeviceMemoryProperties2;
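
A minimal sketch using the memory wrappers above to pick a memory type, assuming a valid VkPhysicalDevice and Vulkan 1.1; the sentinel value and helper name are illustrative:

  #include <cstdint>
  #include <vulkan/vulkan.hpp>

  // Sketch: find the first host-visible memory type index, or UINT32_MAX if none exists.
  uint32_t findHostVisibleMemoryType( VkPhysicalDevice physicalDevice )
  {
    vk::PhysicalDeviceMemoryProperties2 properties2;
    VkPhysicalDeviceMemoryProperties2 & cProperties2 = properties2;
    vkGetPhysicalDeviceMemoryProperties2( physicalDevice, &cProperties2 );

    vk::PhysicalDeviceMemoryProperties const & memory = properties2.memoryProperties;
    for ( uint32_t i = 0; i < memory.memoryTypeCount; ++i )
    {
      if ( memory.memoryTypes[i].propertyFlags & vk::MemoryPropertyFlagBits::eHostVisible )
      {
        return i;
      }
    }
    return UINT32_MAX;
  }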
+
+ struct MultisamplePropertiesEXT
{
- VULKAN_HPP_CONSTEXPR ImageStencilUsageCreateInfo( VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ = {} ) VULKAN_HPP_NOEXCEPT
- : stencilUsage( stencilUsage_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMultisamplePropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR MultisamplePropertiesEXT(VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {}) VULKAN_HPP_NOEXCEPT
+ : maxSampleLocationGridSize( maxSampleLocationGridSize_ )
{}
- VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo & operator=( VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR MultisamplePropertiesEXT( MultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ MultisamplePropertiesEXT( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo ) - offsetof( ImageStencilUsageCreateInfo, pNext ) );
- return *this;
+ *this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ImageStencilUsageCreateInfo( VkImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ MultisamplePropertiesEXT & operator=( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT const *>( &rhs );
+ return *this;
}
- ImageStencilUsageCreateInfo& operator=( VkImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ MultisamplePropertiesEXT & operator=( MultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( MultisamplePropertiesEXT ) );
return *this;
}
- ImageStencilUsageCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+
+ operator VkMultisamplePropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ return *reinterpret_cast<const VkMultisamplePropertiesEXT*>( this );
+ }
+
+ operator VkMultisamplePropertiesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkMultisamplePropertiesEXT*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( MultisamplePropertiesEXT const& ) const = default;
+#else
+ bool operator==( MultisamplePropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize );
+ }
+
+ bool operator!=( MultisamplePropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMultisamplePropertiesEXT;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize = {};
+
+ };
+ static_assert( sizeof( MultisamplePropertiesEXT ) == sizeof( VkMultisamplePropertiesEXT ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<MultisamplePropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eMultisamplePropertiesEXT>
+ {
+ using Type = MultisamplePropertiesEXT;
+ };
+
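MultisamplePropertiesEXT comes from VK_EXT_sample_locations, so its query entry point has to be fetched at runtime; a minimal sketch assuming the extension is enabled and a valid VkInstance/VkPhysicalDevice (helper name illustrative):

  #include <vulkan/vulkan.hpp>

  // Sketch: read the maximum sample-location grid size for 4x MSAA.
  vk::Extent2D maxSampleLocationGrid( VkInstance instance, VkPhysicalDevice physicalDevice )
  {
    auto pfn = reinterpret_cast<PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT>(
        vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMultisamplePropertiesEXT" ) );

    vk::MultisamplePropertiesEXT properties;               // sType preset by the wrapper
    if ( pfn )
    {
      VkMultisamplePropertiesEXT & cProperties = properties;
      pfn( physicalDevice, VK_SAMPLE_COUNT_4_BIT, &cProperties );
    }
    return properties.maxSampleLocationGridSize;
  }
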
+ struct PhysicalDeviceLimits
+ {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLimits(uint32_t maxImageDimension1D_ = {}, uint32_t maxImageDimension2D_ = {}, uint32_t maxImageDimension3D_ = {}, uint32_t maxImageDimensionCube_ = {}, uint32_t maxImageArrayLayers_ = {}, uint32_t maxTexelBufferElements_ = {}, uint32_t maxUniformBufferRange_ = {}, uint32_t maxStorageBufferRange_ = {}, uint32_t maxPushConstantsSize_ = {}, uint32_t maxMemoryAllocationCount_ = {}, uint32_t maxSamplerAllocationCount_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize_ = {}, uint32_t maxBoundDescriptorSets_ = {}, uint32_t maxPerStageDescriptorSamplers_ = {}, uint32_t maxPerStageDescriptorUniformBuffers_ = {}, uint32_t maxPerStageDescriptorStorageBuffers_ = {}, uint32_t maxPerStageDescriptorSampledImages_ = {}, uint32_t maxPerStageDescriptorStorageImages_ = {}, uint32_t maxPerStageDescriptorInputAttachments_ = {}, uint32_t maxPerStageResources_ = {}, uint32_t maxDescriptorSetSamplers_ = {}, uint32_t maxDescriptorSetUniformBuffers_ = {}, uint32_t maxDescriptorSetUniformBuffersDynamic_ = {}, uint32_t maxDescriptorSetStorageBuffers_ = {}, uint32_t maxDescriptorSetStorageBuffersDynamic_ = {}, uint32_t maxDescriptorSetSampledImages_ = {}, uint32_t maxDescriptorSetStorageImages_ = {}, uint32_t maxDescriptorSetInputAttachments_ = {}, uint32_t maxVertexInputAttributes_ = {}, uint32_t maxVertexInputBindings_ = {}, uint32_t maxVertexInputAttributeOffset_ = {}, uint32_t maxVertexInputBindingStride_ = {}, uint32_t maxVertexOutputComponents_ = {}, uint32_t maxTessellationGenerationLevel_ = {}, uint32_t maxTessellationPatchSize_ = {}, uint32_t maxTessellationControlPerVertexInputComponents_ = {}, uint32_t maxTessellationControlPerVertexOutputComponents_ = {}, uint32_t maxTessellationControlPerPatchOutputComponents_ = {}, uint32_t maxTessellationControlTotalOutputComponents_ = {}, uint32_t maxTessellationEvaluationInputComponents_ = {}, uint32_t maxTessellationEvaluationOutputComponents_ = {}, uint32_t maxGeometryShaderInvocations_ = {}, uint32_t maxGeometryInputComponents_ = {}, uint32_t maxGeometryOutputComponents_ = {}, uint32_t maxGeometryOutputVertices_ = {}, uint32_t maxGeometryTotalOutputComponents_ = {}, uint32_t maxFragmentInputComponents_ = {}, uint32_t maxFragmentOutputAttachments_ = {}, uint32_t maxFragmentDualSrcAttachments_ = {}, uint32_t maxFragmentCombinedOutputResources_ = {}, uint32_t maxComputeSharedMemorySize_ = {}, std::array<uint32_t,3> const& maxComputeWorkGroupCount_ = {}, uint32_t maxComputeWorkGroupInvocations_ = {}, std::array<uint32_t,3> const& maxComputeWorkGroupSize_ = {}, uint32_t subPixelPrecisionBits_ = {}, uint32_t subTexelPrecisionBits_ = {}, uint32_t mipmapPrecisionBits_ = {}, uint32_t maxDrawIndexedIndexValue_ = {}, uint32_t maxDrawIndirectCount_ = {}, float maxSamplerLodBias_ = {}, float maxSamplerAnisotropy_ = {}, uint32_t maxViewports_ = {}, std::array<uint32_t,2> const& maxViewportDimensions_ = {}, std::array<float,2> const& viewportBoundsRange_ = {}, uint32_t viewportSubPixelBits_ = {}, size_t minMemoryMapAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize minTexelBufferOffsetAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize minUniformBufferOffsetAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize minStorageBufferOffsetAlignment_ = {}, int32_t minTexelOffset_ = {}, uint32_t maxTexelOffset_ = {}, int32_t minTexelGatherOffset_ = {}, uint32_t maxTexelGatherOffset_ = {}, float minInterpolationOffset_ = {}, float maxInterpolationOffset_ = {}, uint32_t subPixelInterpolationOffsetBits_ = {}, uint32_t maxFramebufferWidth_ = {}, uint32_t maxFramebufferHeight_ = {}, uint32_t maxFramebufferLayers_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferColorSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferDepthSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferStencilSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferNoAttachmentsSampleCounts_ = {}, uint32_t maxColorAttachments_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageColorSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageIntegerSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageDepthSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageStencilSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags storageImageSampleCounts_ = {}, uint32_t maxSampleMaskWords_ = {}, VULKAN_HPP_NAMESPACE::Bool32 timestampComputeAndGraphics_ = {}, float timestampPeriod_ = {}, uint32_t maxClipDistances_ = {}, uint32_t maxCullDistances_ = {}, uint32_t maxCombinedClipAndCullDistances_ = {}, uint32_t discreteQueuePriorities_ = {}, std::array<float,2> const& pointSizeRange_ = {}, std::array<float,2> const& lineWidthRange_ = {}, float pointSizeGranularity_ = {}, float lineWidthGranularity_ = {}, VULKAN_HPP_NAMESPACE::Bool32 strictLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 standardSampleLocations_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyOffsetAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyRowPitchAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize_ = {}) VULKAN_HPP_NOEXCEPT
+ : maxImageDimension1D( maxImageDimension1D_ ), maxImageDimension2D( maxImageDimension2D_ ), maxImageDimension3D( maxImageDimension3D_ ), maxImageDimensionCube( maxImageDimensionCube_ ), maxImageArrayLayers( maxImageArrayLayers_ ), maxTexelBufferElements( maxTexelBufferElements_ ), maxUniformBufferRange( maxUniformBufferRange_ ), maxStorageBufferRange( maxStorageBufferRange_ ), maxPushConstantsSize( maxPushConstantsSize_ ), maxMemoryAllocationCount( maxMemoryAllocationCount_ ), maxSamplerAllocationCount( maxSamplerAllocationCount_ ), bufferImageGranularity( bufferImageGranularity_ ), sparseAddressSpaceSize( sparseAddressSpaceSize_ ), maxBoundDescriptorSets( maxBoundDescriptorSets_ ), maxPerStageDescriptorSamplers( maxPerStageDescriptorSamplers_ ), maxPerStageDescriptorUniformBuffers( maxPerStageDescriptorUniformBuffers_ ), maxPerStageDescriptorStorageBuffers( maxPerStageDescriptorStorageBuffers_ ), maxPerStageDescriptorSampledImages( maxPerStageDescriptorSampledImages_ ), maxPerStageDescriptorStorageImages( maxPerStageDescriptorStorageImages_ ), maxPerStageDescriptorInputAttachments( maxPerStageDescriptorInputAttachments_ ), maxPerStageResources( maxPerStageResources_ ), maxDescriptorSetSamplers( maxDescriptorSetSamplers_ ), maxDescriptorSetUniformBuffers( maxDescriptorSetUniformBuffers_ ), maxDescriptorSetUniformBuffersDynamic( maxDescriptorSetUniformBuffersDynamic_ ), maxDescriptorSetStorageBuffers( maxDescriptorSetStorageBuffers_ ), maxDescriptorSetStorageBuffersDynamic( maxDescriptorSetStorageBuffersDynamic_ ), maxDescriptorSetSampledImages( maxDescriptorSetSampledImages_ ), maxDescriptorSetStorageImages( maxDescriptorSetStorageImages_ ), maxDescriptorSetInputAttachments( maxDescriptorSetInputAttachments_ ), maxVertexInputAttributes( maxVertexInputAttributes_ ), maxVertexInputBindings( maxVertexInputBindings_ ), maxVertexInputAttributeOffset( maxVertexInputAttributeOffset_ ), maxVertexInputBindingStride( maxVertexInputBindingStride_ ), maxVertexOutputComponents( maxVertexOutputComponents_ ), maxTessellationGenerationLevel( maxTessellationGenerationLevel_ ), maxTessellationPatchSize( maxTessellationPatchSize_ ), maxTessellationControlPerVertexInputComponents( maxTessellationControlPerVertexInputComponents_ ), maxTessellationControlPerVertexOutputComponents( maxTessellationControlPerVertexOutputComponents_ ), maxTessellationControlPerPatchOutputComponents( maxTessellationControlPerPatchOutputComponents_ ), maxTessellationControlTotalOutputComponents( maxTessellationControlTotalOutputComponents_ ), maxTessellationEvaluationInputComponents( maxTessellationEvaluationInputComponents_ ), maxTessellationEvaluationOutputComponents( maxTessellationEvaluationOutputComponents_ ), maxGeometryShaderInvocations( maxGeometryShaderInvocations_ ), maxGeometryInputComponents( maxGeometryInputComponents_ ), maxGeometryOutputComponents( maxGeometryOutputComponents_ ), maxGeometryOutputVertices( maxGeometryOutputVertices_ ), maxGeometryTotalOutputComponents( maxGeometryTotalOutputComponents_ ), maxFragmentInputComponents( maxFragmentInputComponents_ ), maxFragmentOutputAttachments( maxFragmentOutputAttachments_ ), maxFragmentDualSrcAttachments( maxFragmentDualSrcAttachments_ ), maxFragmentCombinedOutputResources( maxFragmentCombinedOutputResources_ ), maxComputeSharedMemorySize( maxComputeSharedMemorySize_ ), maxComputeWorkGroupCount( maxComputeWorkGroupCount_ ), maxComputeWorkGroupInvocations( maxComputeWorkGroupInvocations_ ), maxComputeWorkGroupSize( maxComputeWorkGroupSize_ ), subPixelPrecisionBits( subPixelPrecisionBits_ ), subTexelPrecisionBits( subTexelPrecisionBits_ ), mipmapPrecisionBits( mipmapPrecisionBits_ ), maxDrawIndexedIndexValue( maxDrawIndexedIndexValue_ ), maxDrawIndirectCount( maxDrawIndirectCount_ ), maxSamplerLodBias( maxSamplerLodBias_ ), maxSamplerAnisotropy( maxSamplerAnisotropy_ ), maxViewports( maxViewports_ ), maxViewportDimensions( maxViewportDimensions_ ), viewportBoundsRange( viewportBoundsRange_ ), viewportSubPixelBits( viewportSubPixelBits_ ), minMemoryMapAlignment( minMemoryMapAlignment_ ), minTexelBufferOffsetAlignment( minTexelBufferOffsetAlignment_ ), minUniformBufferOffsetAlignment( minUniformBufferOffsetAlignment_ ), minStorageBufferOffsetAlignment( minStorageBufferOffsetAlignment_ ), minTexelOffset( minTexelOffset_ ), maxTexelOffset( maxTexelOffset_ ), minTexelGatherOffset( minTexelGatherOffset_ ), maxTexelGatherOffset( maxTexelGatherOffset_ ), minInterpolationOffset( minInterpolationOffset_ ), maxInterpolationOffset( maxInterpolationOffset_ ), subPixelInterpolationOffsetBits( subPixelInterpolationOffsetBits_ ), maxFramebufferWidth( maxFramebufferWidth_ ), maxFramebufferHeight( maxFramebufferHeight_ ), maxFramebufferLayers( maxFramebufferLayers_ ), framebufferColorSampleCounts( framebufferColorSampleCounts_ ), framebufferDepthSampleCounts( framebufferDepthSampleCounts_ ), framebufferStencilSampleCounts( framebufferStencilSampleCounts_ ), framebufferNoAttachmentsSampleCounts( framebufferNoAttachmentsSampleCounts_ ), maxColorAttachments( maxColorAttachments_ ), sampledImageColorSampleCounts( sampledImageColorSampleCounts_ ), sampledImageIntegerSampleCounts( sampledImageIntegerSampleCounts_ ), sampledImageDepthSampleCounts( sampledImageDepthSampleCounts_ ), sampledImageStencilSampleCounts( sampledImageStencilSampleCounts_ ), storageImageSampleCounts( storageImageSampleCounts_ ), maxSampleMaskWords( maxSampleMaskWords_ ), timestampComputeAndGraphics( timestampComputeAndGraphics_ ), timestampPeriod( timestampPeriod_ ), maxClipDistances( maxClipDistances_ ), maxCullDistances( maxCullDistances_ ), maxCombinedClipAndCullDistances( maxCombinedClipAndCullDistances_ ), discreteQueuePriorities( discreteQueuePriorities_ ), pointSizeRange( pointSizeRange_ ), lineWidthRange( lineWidthRange_ ), pointSizeGranularity( pointSizeGranularity_ ), lineWidthGranularity( lineWidthGranularity_ ), strictLines( strictLines_ ), standardSampleLocations( standardSampleLocations_ ), optimalBufferCopyOffsetAlignment( optimalBufferCopyOffsetAlignment_ ), optimalBufferCopyRowPitchAlignment( optimalBufferCopyRowPitchAlignment_ ), nonCoherentAtomSize( nonCoherentAtomSize_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLimits( PhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceLimits( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceLimits & operator=( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits const *>( &rhs );
return *this;
}
- ImageStencilUsageCreateInfo & setStencilUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceLimits & operator=( PhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT
{
- stencilUsage = stencilUsage_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceLimits ) );
return *this;
}
- operator VkImageStencilUsageCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkPhysicalDeviceLimits const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkImageStencilUsageCreateInfo*>( this );
+ return *reinterpret_cast<const VkPhysicalDeviceLimits*>( this );
}
- operator VkImageStencilUsageCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceLimits &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkImageStencilUsageCreateInfo*>( this );
+ return *reinterpret_cast<VkPhysicalDeviceLimits*>( this );
}
- bool operator==( ImageStencilUsageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceLimits const& ) const = default;
+#else
+ bool operator==( PhysicalDeviceLimits const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( stencilUsage == rhs.stencilUsage );
+ return ( maxImageDimension1D == rhs.maxImageDimension1D )
+ && ( maxImageDimension2D == rhs.maxImageDimension2D )
+ && ( maxImageDimension3D == rhs.maxImageDimension3D )
+ && ( maxImageDimensionCube == rhs.maxImageDimensionCube )
+ && ( maxImageArrayLayers == rhs.maxImageArrayLayers )
+ && ( maxTexelBufferElements == rhs.maxTexelBufferElements )
+ && ( maxUniformBufferRange == rhs.maxUniformBufferRange )
+ && ( maxStorageBufferRange == rhs.maxStorageBufferRange )
+ && ( maxPushConstantsSize == rhs.maxPushConstantsSize )
+ && ( maxMemoryAllocationCount == rhs.maxMemoryAllocationCount )
+ && ( maxSamplerAllocationCount == rhs.maxSamplerAllocationCount )
+ && ( bufferImageGranularity == rhs.bufferImageGranularity )
+ && ( sparseAddressSpaceSize == rhs.sparseAddressSpaceSize )
+ && ( maxBoundDescriptorSets == rhs.maxBoundDescriptorSets )
+ && ( maxPerStageDescriptorSamplers == rhs.maxPerStageDescriptorSamplers )
+ && ( maxPerStageDescriptorUniformBuffers == rhs.maxPerStageDescriptorUniformBuffers )
+ && ( maxPerStageDescriptorStorageBuffers == rhs.maxPerStageDescriptorStorageBuffers )
+ && ( maxPerStageDescriptorSampledImages == rhs.maxPerStageDescriptorSampledImages )
+ && ( maxPerStageDescriptorStorageImages == rhs.maxPerStageDescriptorStorageImages )
+ && ( maxPerStageDescriptorInputAttachments == rhs.maxPerStageDescriptorInputAttachments )
+ && ( maxPerStageResources == rhs.maxPerStageResources )
+ && ( maxDescriptorSetSamplers == rhs.maxDescriptorSetSamplers )
+ && ( maxDescriptorSetUniformBuffers == rhs.maxDescriptorSetUniformBuffers )
+ && ( maxDescriptorSetUniformBuffersDynamic == rhs.maxDescriptorSetUniformBuffersDynamic )
+ && ( maxDescriptorSetStorageBuffers == rhs.maxDescriptorSetStorageBuffers )
+ && ( maxDescriptorSetStorageBuffersDynamic == rhs.maxDescriptorSetStorageBuffersDynamic )
+ && ( maxDescriptorSetSampledImages == rhs.maxDescriptorSetSampledImages )
+ && ( maxDescriptorSetStorageImages == rhs.maxDescriptorSetStorageImages )
+ && ( maxDescriptorSetInputAttachments == rhs.maxDescriptorSetInputAttachments )
+ && ( maxVertexInputAttributes == rhs.maxVertexInputAttributes )
+ && ( maxVertexInputBindings == rhs.maxVertexInputBindings )
+ && ( maxVertexInputAttributeOffset == rhs.maxVertexInputAttributeOffset )
+ && ( maxVertexInputBindingStride == rhs.maxVertexInputBindingStride )
+ && ( maxVertexOutputComponents == rhs.maxVertexOutputComponents )
+ && ( maxTessellationGenerationLevel == rhs.maxTessellationGenerationLevel )
+ && ( maxTessellationPatchSize == rhs.maxTessellationPatchSize )
+ && ( maxTessellationControlPerVertexInputComponents == rhs.maxTessellationControlPerVertexInputComponents )
+ && ( maxTessellationControlPerVertexOutputComponents == rhs.maxTessellationControlPerVertexOutputComponents )
+ && ( maxTessellationControlPerPatchOutputComponents == rhs.maxTessellationControlPerPatchOutputComponents )
+ && ( maxTessellationControlTotalOutputComponents == rhs.maxTessellationControlTotalOutputComponents )
+ && ( maxTessellationEvaluationInputComponents == rhs.maxTessellationEvaluationInputComponents )
+ && ( maxTessellationEvaluationOutputComponents == rhs.maxTessellationEvaluationOutputComponents )
+ && ( maxGeometryShaderInvocations == rhs.maxGeometryShaderInvocations )
+ && ( maxGeometryInputComponents == rhs.maxGeometryInputComponents )
+ && ( maxGeometryOutputComponents == rhs.maxGeometryOutputComponents )
+ && ( maxGeometryOutputVertices == rhs.maxGeometryOutputVertices )
+ && ( maxGeometryTotalOutputComponents == rhs.maxGeometryTotalOutputComponents )
+ && ( maxFragmentInputComponents == rhs.maxFragmentInputComponents )
+ && ( maxFragmentOutputAttachments == rhs.maxFragmentOutputAttachments )
+ && ( maxFragmentDualSrcAttachments == rhs.maxFragmentDualSrcAttachments )
+ && ( maxFragmentCombinedOutputResources == rhs.maxFragmentCombinedOutputResources )
+ && ( maxComputeSharedMemorySize == rhs.maxComputeSharedMemorySize )
+ && ( maxComputeWorkGroupCount == rhs.maxComputeWorkGroupCount )
+ && ( maxComputeWorkGroupInvocations == rhs.maxComputeWorkGroupInvocations )
+ && ( maxComputeWorkGroupSize == rhs.maxComputeWorkGroupSize )
+ && ( subPixelPrecisionBits == rhs.subPixelPrecisionBits )
+ && ( subTexelPrecisionBits == rhs.subTexelPrecisionBits )
+ && ( mipmapPrecisionBits == rhs.mipmapPrecisionBits )
+ && ( maxDrawIndexedIndexValue == rhs.maxDrawIndexedIndexValue )
+ && ( maxDrawIndirectCount == rhs.maxDrawIndirectCount )
+ && ( maxSamplerLodBias == rhs.maxSamplerLodBias )
+ && ( maxSamplerAnisotropy == rhs.maxSamplerAnisotropy )
+ && ( maxViewports == rhs.maxViewports )
+ && ( maxViewportDimensions == rhs.maxViewportDimensions )
+ && ( viewportBoundsRange == rhs.viewportBoundsRange )
+ && ( viewportSubPixelBits == rhs.viewportSubPixelBits )
+ && ( minMemoryMapAlignment == rhs.minMemoryMapAlignment )
+ && ( minTexelBufferOffsetAlignment == rhs.minTexelBufferOffsetAlignment )
+ && ( minUniformBufferOffsetAlignment == rhs.minUniformBufferOffsetAlignment )
+ && ( minStorageBufferOffsetAlignment == rhs.minStorageBufferOffsetAlignment )
+ && ( minTexelOffset == rhs.minTexelOffset )
+ && ( maxTexelOffset == rhs.maxTexelOffset )
+ && ( minTexelGatherOffset == rhs.minTexelGatherOffset )
+ && ( maxTexelGatherOffset == rhs.maxTexelGatherOffset )
+ && ( minInterpolationOffset == rhs.minInterpolationOffset )
+ && ( maxInterpolationOffset == rhs.maxInterpolationOffset )
+ && ( subPixelInterpolationOffsetBits == rhs.subPixelInterpolationOffsetBits )
+ && ( maxFramebufferWidth == rhs.maxFramebufferWidth )
+ && ( maxFramebufferHeight == rhs.maxFramebufferHeight )
+ && ( maxFramebufferLayers == rhs.maxFramebufferLayers )
+ && ( framebufferColorSampleCounts == rhs.framebufferColorSampleCounts )
+ && ( framebufferDepthSampleCounts == rhs.framebufferDepthSampleCounts )
+ && ( framebufferStencilSampleCounts == rhs.framebufferStencilSampleCounts )
+ && ( framebufferNoAttachmentsSampleCounts == rhs.framebufferNoAttachmentsSampleCounts )
+ && ( maxColorAttachments == rhs.maxColorAttachments )
+ && ( sampledImageColorSampleCounts == rhs.sampledImageColorSampleCounts )
+ && ( sampledImageIntegerSampleCounts == rhs.sampledImageIntegerSampleCounts )
+ && ( sampledImageDepthSampleCounts == rhs.sampledImageDepthSampleCounts )
+ && ( sampledImageStencilSampleCounts == rhs.sampledImageStencilSampleCounts )
+ && ( storageImageSampleCounts == rhs.storageImageSampleCounts )
+ && ( maxSampleMaskWords == rhs.maxSampleMaskWords )
+ && ( timestampComputeAndGraphics == rhs.timestampComputeAndGraphics )
+ && ( timestampPeriod == rhs.timestampPeriod )
+ && ( maxClipDistances == rhs.maxClipDistances )
+ && ( maxCullDistances == rhs.maxCullDistances )
+ && ( maxCombinedClipAndCullDistances == rhs.maxCombinedClipAndCullDistances )
+ && ( discreteQueuePriorities == rhs.discreteQueuePriorities )
+ && ( pointSizeRange == rhs.pointSizeRange )
+ && ( lineWidthRange == rhs.lineWidthRange )
+ && ( pointSizeGranularity == rhs.pointSizeGranularity )
+ && ( lineWidthGranularity == rhs.lineWidthGranularity )
+ && ( strictLines == rhs.strictLines )
+ && ( standardSampleLocations == rhs.standardSampleLocations )
+ && ( optimalBufferCopyOffsetAlignment == rhs.optimalBufferCopyOffsetAlignment )
+ && ( optimalBufferCopyRowPitchAlignment == rhs.optimalBufferCopyRowPitchAlignment )
+ && ( nonCoherentAtomSize == rhs.nonCoherentAtomSize );
}
- bool operator!=( ImageStencilUsageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PhysicalDeviceLimits const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageStencilUsageCreateInfo;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage = {};
+ uint32_t maxImageDimension1D = {};
+ uint32_t maxImageDimension2D = {};
+ uint32_t maxImageDimension3D = {};
+ uint32_t maxImageDimensionCube = {};
+ uint32_t maxImageArrayLayers = {};
+ uint32_t maxTexelBufferElements = {};
+ uint32_t maxUniformBufferRange = {};
+ uint32_t maxStorageBufferRange = {};
+ uint32_t maxPushConstantsSize = {};
+ uint32_t maxMemoryAllocationCount = {};
+ uint32_t maxSamplerAllocationCount = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize = {};
+ uint32_t maxBoundDescriptorSets = {};
+ uint32_t maxPerStageDescriptorSamplers = {};
+ uint32_t maxPerStageDescriptorUniformBuffers = {};
+ uint32_t maxPerStageDescriptorStorageBuffers = {};
+ uint32_t maxPerStageDescriptorSampledImages = {};
+ uint32_t maxPerStageDescriptorStorageImages = {};
+ uint32_t maxPerStageDescriptorInputAttachments = {};
+ uint32_t maxPerStageResources = {};
+ uint32_t maxDescriptorSetSamplers = {};
+ uint32_t maxDescriptorSetUniformBuffers = {};
+ uint32_t maxDescriptorSetUniformBuffersDynamic = {};
+ uint32_t maxDescriptorSetStorageBuffers = {};
+ uint32_t maxDescriptorSetStorageBuffersDynamic = {};
+ uint32_t maxDescriptorSetSampledImages = {};
+ uint32_t maxDescriptorSetStorageImages = {};
+ uint32_t maxDescriptorSetInputAttachments = {};
+ uint32_t maxVertexInputAttributes = {};
+ uint32_t maxVertexInputBindings = {};
+ uint32_t maxVertexInputAttributeOffset = {};
+ uint32_t maxVertexInputBindingStride = {};
+ uint32_t maxVertexOutputComponents = {};
+ uint32_t maxTessellationGenerationLevel = {};
+ uint32_t maxTessellationPatchSize = {};
+ uint32_t maxTessellationControlPerVertexInputComponents = {};
+ uint32_t maxTessellationControlPerVertexOutputComponents = {};
+ uint32_t maxTessellationControlPerPatchOutputComponents = {};
+ uint32_t maxTessellationControlTotalOutputComponents = {};
+ uint32_t maxTessellationEvaluationInputComponents = {};
+ uint32_t maxTessellationEvaluationOutputComponents = {};
+ uint32_t maxGeometryShaderInvocations = {};
+ uint32_t maxGeometryInputComponents = {};
+ uint32_t maxGeometryOutputComponents = {};
+ uint32_t maxGeometryOutputVertices = {};
+ uint32_t maxGeometryTotalOutputComponents = {};
+ uint32_t maxFragmentInputComponents = {};
+ uint32_t maxFragmentOutputAttachments = {};
+ uint32_t maxFragmentDualSrcAttachments = {};
+ uint32_t maxFragmentCombinedOutputResources = {};
+ uint32_t maxComputeSharedMemorySize = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxComputeWorkGroupCount = {};
+ uint32_t maxComputeWorkGroupInvocations = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxComputeWorkGroupSize = {};
+ uint32_t subPixelPrecisionBits = {};
+ uint32_t subTexelPrecisionBits = {};
+ uint32_t mipmapPrecisionBits = {};
+ uint32_t maxDrawIndexedIndexValue = {};
+ uint32_t maxDrawIndirectCount = {};
+ float maxSamplerLodBias = {};
+ float maxSamplerAnisotropy = {};
+ uint32_t maxViewports = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 2> maxViewportDimensions = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> viewportBoundsRange = {};
+ uint32_t viewportSubPixelBits = {};
+ size_t minMemoryMapAlignment = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize minTexelBufferOffsetAlignment = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize minUniformBufferOffsetAlignment = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize minStorageBufferOffsetAlignment = {};
+ int32_t minTexelOffset = {};
+ uint32_t maxTexelOffset = {};
+ int32_t minTexelGatherOffset = {};
+ uint32_t maxTexelGatherOffset = {};
+ float minInterpolationOffset = {};
+ float maxInterpolationOffset = {};
+ uint32_t subPixelInterpolationOffsetBits = {};
+ uint32_t maxFramebufferWidth = {};
+ uint32_t maxFramebufferHeight = {};
+ uint32_t maxFramebufferLayers = {};
+ VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferColorSampleCounts = {};
+ VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferDepthSampleCounts = {};
+ VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferStencilSampleCounts = {};
+ VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferNoAttachmentsSampleCounts = {};
+ uint32_t maxColorAttachments = {};
+ VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageColorSampleCounts = {};
+ VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageIntegerSampleCounts = {};
+ VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageDepthSampleCounts = {};
+ VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageStencilSampleCounts = {};
+ VULKAN_HPP_NAMESPACE::SampleCountFlags storageImageSampleCounts = {};
+ uint32_t maxSampleMaskWords = {};
+ VULKAN_HPP_NAMESPACE::Bool32 timestampComputeAndGraphics = {};
+ float timestampPeriod = {};
+ uint32_t maxClipDistances = {};
+ uint32_t maxCullDistances = {};
+ uint32_t maxCombinedClipAndCullDistances = {};
+ uint32_t discreteQueuePriorities = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> pointSizeRange = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> lineWidthRange = {};
+ float pointSizeGranularity = {};
+ float lineWidthGranularity = {};
+ VULKAN_HPP_NAMESPACE::Bool32 strictLines = {};
+ VULKAN_HPP_NAMESPACE::Bool32 standardSampleLocations = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyOffsetAlignment = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyRowPitchAlignment = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize = {};
+
};
- static_assert( sizeof( ImageStencilUsageCreateInfo ) == sizeof( VkImageStencilUsageCreateInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ImageStencilUsageCreateInfo>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( PhysicalDeviceLimits ) == sizeof( VkPhysicalDeviceLimits ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceLimits>::value, "struct wrapper is not a standard layout!" );
- struct ImageSwapchainCreateInfoKHR
+ struct PhysicalDeviceSparseProperties
{
- VULKAN_HPP_CONSTEXPR ImageSwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {} ) VULKAN_HPP_NOEXCEPT
- : swapchain( swapchain_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseProperties(VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape_ = {}, VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape_ = {}, VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape_ = {}, VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize_ = {}, VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict_ = {}) VULKAN_HPP_NOEXCEPT
+ : residencyStandard2DBlockShape( residencyStandard2DBlockShape_ ), residencyStandard2DMultisampleBlockShape( residencyStandard2DMultisampleBlockShape_ ), residencyStandard3DBlockShape( residencyStandard3DBlockShape_ ), residencyAlignedMipSize( residencyAlignedMipSize_ ), residencyNonResidentStrict( residencyNonResidentStrict_ )
{}
- VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR & operator=( VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseProperties( PhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceSparseProperties( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR ) - offsetof( ImageSwapchainCreateInfoKHR, pNext ) );
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceSparseProperties & operator=( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties const *>( &rhs );
return *this;
}
- ImageSwapchainCreateInfoKHR( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceSparseProperties & operator=( PhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceSparseProperties ) );
+ return *this;
+ }
+
+
+ operator VkPhysicalDeviceSparseProperties const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceSparseProperties*>( this );
+ }
+
+ operator VkPhysicalDeviceSparseProperties &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceSparseProperties*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceSparseProperties const& ) const = default;
+#else
+ bool operator==( PhysicalDeviceSparseProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( residencyStandard2DBlockShape == rhs.residencyStandard2DBlockShape )
+ && ( residencyStandard2DMultisampleBlockShape == rhs.residencyStandard2DMultisampleBlockShape )
+ && ( residencyStandard3DBlockShape == rhs.residencyStandard3DBlockShape )
+ && ( residencyAlignedMipSize == rhs.residencyAlignedMipSize )
+ && ( residencyNonResidentStrict == rhs.residencyNonResidentStrict );
+ }
+
+ bool operator!=( PhysicalDeviceSparseProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape = {};
+ VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape = {};
+ VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape = {};
+ VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize = {};
+ VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict = {};
+
+ };
+ static_assert( sizeof( PhysicalDeviceSparseProperties ) == sizeof( VkPhysicalDeviceSparseProperties ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceSparseProperties>::value, "struct wrapper is not a standard layout!" );
+
+ struct PhysicalDeviceProperties
+ {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties(uint32_t apiVersion_ = {}, uint32_t driverVersion_ = {}, uint32_t vendorID_ = {}, uint32_t deviceID_ = {}, VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType_ = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther, std::array<char,VK_MAX_PHYSICAL_DEVICE_NAME_SIZE> const& deviceName_ = {}, std::array<uint8_t,VK_UUID_SIZE> const& pipelineCacheUUID_ = {}, VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits_ = {}, VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties_ = {}) VULKAN_HPP_NOEXCEPT
+ : apiVersion( apiVersion_ ), driverVersion( driverVersion_ ), vendorID( vendorID_ ), deviceID( deviceID_ ), deviceType( deviceType_ ), deviceName( deviceName_ ), pipelineCacheUUID( pipelineCacheUUID_ ), limits( limits_ ), sparseProperties( sparseProperties_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties( PhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceProperties( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ImageSwapchainCreateInfoKHR& operator=( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceProperties & operator=( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties const *>( &rhs );
return *this;
}
- ImageSwapchainCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceProperties & operator=( PhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceProperties ) );
return *this;
}
- ImageSwapchainCreateInfoKHR & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT
+
+ operator VkPhysicalDeviceProperties const&() const VULKAN_HPP_NOEXCEPT
{
- swapchain = swapchain_;
+ return *reinterpret_cast<const VkPhysicalDeviceProperties*>( this );
+ }
+
+ operator VkPhysicalDeviceProperties &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceProperties*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceProperties const& ) const = default;
+#else
+ bool operator==( PhysicalDeviceProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( apiVersion == rhs.apiVersion )
+ && ( driverVersion == rhs.driverVersion )
+ && ( vendorID == rhs.vendorID )
+ && ( deviceID == rhs.deviceID )
+ && ( deviceType == rhs.deviceType )
+ && ( deviceName == rhs.deviceName )
+ && ( pipelineCacheUUID == rhs.pipelineCacheUUID )
+ && ( limits == rhs.limits )
+ && ( sparseProperties == rhs.sparseProperties );
+ }
+
+ bool operator!=( PhysicalDeviceProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ uint32_t apiVersion = {};
+ uint32_t driverVersion = {};
+ uint32_t vendorID = {};
+ uint32_t deviceID = {};
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther;
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE> deviceName = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> pipelineCacheUUID = {};
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits = {};
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties = {};
+
+ };
+ static_assert( sizeof( PhysicalDeviceProperties ) == sizeof( VkPhysicalDeviceProperties ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceProperties>::value, "struct wrapper is not a standard layout!" );
+
+ struct PhysicalDeviceProperties2
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProperties2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties2(VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties_ = {}) VULKAN_HPP_NOEXCEPT
+ : properties( properties_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties2( PhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceProperties2( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceProperties2 & operator=( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 const *>( &rhs );
return *this;
}
- operator VkImageSwapchainCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceProperties2 & operator=( PhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkImageSwapchainCreateInfoKHR*>( this );
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceProperties2 ) );
+ return *this;
}
- operator VkImageSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+
+ operator VkPhysicalDeviceProperties2 const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkImageSwapchainCreateInfoKHR*>( this );
+ return *reinterpret_cast<const VkPhysicalDeviceProperties2*>( this );
}
- bool operator==( ImageSwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceProperties2 &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceProperties2*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceProperties2 const& ) const = default;
+#else
+ bool operator==( PhysicalDeviceProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( swapchain == rhs.swapchain );
+ && ( properties == rhs.properties );
}
- bool operator!=( ImageSwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PhysicalDeviceProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSwapchainCreateInfoKHR;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {};
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProperties2;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties = {};
+
};
- static_assert( sizeof( ImageSwapchainCreateInfoKHR ) == sizeof( VkImageSwapchainCreateInfoKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ImageSwapchainCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( PhysicalDeviceProperties2 ) == sizeof( VkPhysicalDeviceProperties2 ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceProperties2>::value, "struct wrapper is not a standard layout!" );
- struct ImageViewASTCDecodeModeEXT
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceProperties2>
+ {
+ using Type = PhysicalDeviceProperties2;
+ };
+ using PhysicalDeviceProperties2KHR = PhysicalDeviceProperties2;
+
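  // Illustrative usage sketch (not part of the generated header or this diff):
  // PhysicalDeviceProperties2 is the extensible variant of PhysicalDeviceProperties; its
  // pNext chain carries extension property structs. Assuming a valid vk::PhysicalDevice
  // handle and the enhanced-mode accessors defined elsewhere in this header:
  //   vk::PhysicalDeviceProperties2 props2 = physicalDevice.getProperties2();
  //   uint32_t apiVersion = props2.properties.apiVersion;  // core properties live in .properties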
+ struct QueryPoolPerformanceCreateInfoKHR
{
- VULKAN_HPP_CONSTEXPR ImageViewASTCDecodeModeEXT( VULKAN_HPP_NAMESPACE::Format decodeMode_ = VULKAN_HPP_NAMESPACE::Format::eUndefined ) VULKAN_HPP_NOEXCEPT
- : decodeMode( decodeMode_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryPoolPerformanceCreateInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR QueryPoolPerformanceCreateInfoKHR(uint32_t queueFamilyIndex_ = {}, uint32_t counterIndexCount_ = {}, const uint32_t* pCounterIndices_ = {}) VULKAN_HPP_NOEXCEPT
+ : queueFamilyIndex( queueFamilyIndex_ ), counterIndexCount( counterIndexCount_ ), pCounterIndices( pCounterIndices_ )
{}
- VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT & operator=( VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR QueryPoolPerformanceCreateInfoKHR( QueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ QueryPoolPerformanceCreateInfoKHR( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT ) - offsetof( ImageViewASTCDecodeModeEXT, pNext ) );
- return *this;
+ *this = rhs;
}
- ImageViewASTCDecodeModeEXT( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ QueryPoolPerformanceCreateInfoKHR( uint32_t queueFamilyIndex_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & counterIndices_ )
+ : queueFamilyIndex( queueFamilyIndex_ ), counterIndexCount( static_cast<uint32_t>( counterIndices_.size() ) ), pCounterIndices( counterIndices_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ QueryPoolPerformanceCreateInfoKHR & operator=( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR const *>( &rhs );
+ return *this;
}
- ImageViewASTCDecodeModeEXT& operator=( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ QueryPoolPerformanceCreateInfoKHR & operator=( QueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( QueryPoolPerformanceCreateInfoKHR ) );
return *this;
}
- ImageViewASTCDecodeModeEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ QueryPoolPerformanceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- ImageViewASTCDecodeModeEXT & setDecodeMode( VULKAN_HPP_NAMESPACE::Format decodeMode_ ) VULKAN_HPP_NOEXCEPT
+ QueryPoolPerformanceCreateInfoKHR & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
{
- decodeMode = decodeMode_;
+ queueFamilyIndex = queueFamilyIndex_;
return *this;
}
- operator VkImageViewASTCDecodeModeEXT const&() const VULKAN_HPP_NOEXCEPT
+ QueryPoolPerformanceCreateInfoKHR & setCounterIndexCount( uint32_t counterIndexCount_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkImageViewASTCDecodeModeEXT*>( this );
+ counterIndexCount = counterIndexCount_;
+ return *this;
}
- operator VkImageViewASTCDecodeModeEXT &() VULKAN_HPP_NOEXCEPT
+ QueryPoolPerformanceCreateInfoKHR & setPCounterIndices( const uint32_t* pCounterIndices_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkImageViewASTCDecodeModeEXT*>( this );
+ pCounterIndices = pCounterIndices_;
+ return *this;
}
- bool operator==( ImageViewASTCDecodeModeEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ QueryPoolPerformanceCreateInfoKHR & setCounterIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & counterIndices_ ) VULKAN_HPP_NOEXCEPT
+ {
+ counterIndexCount = static_cast<uint32_t>( counterIndices_.size() );
+ pCounterIndices = counterIndices_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+ operator VkQueryPoolPerformanceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR*>( this );
+ }
+
+ operator VkQueryPoolPerformanceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkQueryPoolPerformanceCreateInfoKHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( QueryPoolPerformanceCreateInfoKHR const& ) const = default;
+#else
+ bool operator==( QueryPoolPerformanceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( decodeMode == rhs.decodeMode );
+ && ( queueFamilyIndex == rhs.queueFamilyIndex )
+ && ( counterIndexCount == rhs.counterIndexCount )
+ && ( pCounterIndices == rhs.pCounterIndices );
}
- bool operator!=( ImageViewASTCDecodeModeEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( QueryPoolPerformanceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewAstcDecodeModeEXT;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolPerformanceCreateInfoKHR;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::Format decodeMode = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ uint32_t queueFamilyIndex = {};
+ uint32_t counterIndexCount = {};
+ const uint32_t* pCounterIndices = {};
+
};
- static_assert( sizeof( ImageViewASTCDecodeModeEXT ) == sizeof( VkImageViewASTCDecodeModeEXT ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ImageViewASTCDecodeModeEXT>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( QueryPoolPerformanceCreateInfoKHR ) == sizeof( VkQueryPoolPerformanceCreateInfoKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<QueryPoolPerformanceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
- struct ImageViewCreateInfo
+ template <>
+ struct CppType<StructureType, StructureType::eQueryPoolPerformanceCreateInfoKHR>
+ {
+ using Type = QueryPoolPerformanceCreateInfoKHR;
+ };
+
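  // Illustrative usage sketch (not part of the generated header or this diff):
  // the enhanced-mode constructor above deduces counterIndexCount/pCounterIndices from an
  // ArrayProxy instead of taking them separately, e.g. (hypothetical indices and queue family):
  //   std::array<uint32_t, 2> counterIndices = { 0, 1 };
  //   vk::QueryPoolPerformanceCreateInfoKHR perfInfo( queueFamilyIndex, counterIndices );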
+ struct QueueFamilyProperties
{
- VULKAN_HPP_CONSTEXPR ImageViewCreateInfo( VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags_ = {},
- VULKAN_HPP_NAMESPACE::Image image_ = {},
- VULKAN_HPP_NAMESPACE::ImageViewType viewType_ = VULKAN_HPP_NAMESPACE::ImageViewType::e1D,
- VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
- VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {},
- VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , image( image_ )
- , viewType( viewType_ )
- , format( format_ )
- , components( components_ )
- , subresourceRange( subresourceRange_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR QueueFamilyProperties(VULKAN_HPP_NAMESPACE::QueueFlags queueFlags_ = {}, uint32_t queueCount_ = {}, uint32_t timestampValidBits_ = {}, VULKAN_HPP_NAMESPACE::Extent3D minImageTransferGranularity_ = {}) VULKAN_HPP_NOEXCEPT
+ : queueFlags( queueFlags_ ), queueCount( queueCount_ ), timestampValidBits( timestampValidBits_ ), minImageTransferGranularity( minImageTransferGranularity_ )
{}
- VULKAN_HPP_NAMESPACE::ImageViewCreateInfo & operator=( VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR QueueFamilyProperties( QueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ QueueFamilyProperties( VkQueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ QueueFamilyProperties & operator=( VkQueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageViewCreateInfo ) - offsetof( ImageViewCreateInfo, pNext ) );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyProperties const *>( &rhs );
return *this;
}
- ImageViewCreateInfo( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ QueueFamilyProperties & operator=( QueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( QueueFamilyProperties ) );
+ return *this;
+ }
+
+
+ operator VkQueueFamilyProperties const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkQueueFamilyProperties*>( this );
+ }
+
+ operator VkQueueFamilyProperties &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkQueueFamilyProperties*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( QueueFamilyProperties const& ) const = default;
+#else
+ bool operator==( QueueFamilyProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( queueFlags == rhs.queueFlags )
+ && ( queueCount == rhs.queueCount )
+ && ( timestampValidBits == rhs.timestampValidBits )
+ && ( minImageTransferGranularity == rhs.minImageTransferGranularity );
+ }
+
+ bool operator!=( QueueFamilyProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::QueueFlags queueFlags = {};
+ uint32_t queueCount = {};
+ uint32_t timestampValidBits = {};
+ VULKAN_HPP_NAMESPACE::Extent3D minImageTransferGranularity = {};
+
+ };
+ static_assert( sizeof( QueueFamilyProperties ) == sizeof( VkQueueFamilyProperties ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<QueueFamilyProperties>::value, "struct wrapper is not a standard layout!" );
+
+ struct QueueFamilyProperties2
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyProperties2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR QueueFamilyProperties2(VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties_ = {}) VULKAN_HPP_NOEXCEPT
+ : queueFamilyProperties( queueFamilyProperties_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR QueueFamilyProperties2( QueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ QueueFamilyProperties2( VkQueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ImageViewCreateInfo& operator=( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ QueueFamilyProperties2 & operator=( VkQueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 const *>( &rhs );
return *this;
}
- ImageViewCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ QueueFamilyProperties2 & operator=( QueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( QueueFamilyProperties2 ) );
return *this;
}
- ImageViewCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+
+ operator VkQueueFamilyProperties2 const&() const VULKAN_HPP_NOEXCEPT
{
- flags = flags_;
+ return *reinterpret_cast<const VkQueueFamilyProperties2*>( this );
+ }
+
+ operator VkQueueFamilyProperties2 &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkQueueFamilyProperties2*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( QueueFamilyProperties2 const& ) const = default;
+#else
+ bool operator==( QueueFamilyProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( queueFamilyProperties == rhs.queueFamilyProperties );
+ }
+
+ bool operator!=( QueueFamilyProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyProperties2;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties = {};
+
+ };
+ static_assert( sizeof( QueueFamilyProperties2 ) == sizeof( VkQueueFamilyProperties2 ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<QueueFamilyProperties2>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eQueueFamilyProperties2>
+ {
+ using Type = QueueFamilyProperties2;
+ };
+ using QueueFamilyProperties2KHR = QueueFamilyProperties2;
+
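  // Illustrative usage sketch (not part of the generated header or this diff):
  // QueueFamilyProperties2 wraps the core QueueFamilyProperties for the *2 query path;
  // assuming the enhanced-mode accessor defined elsewhere in this header:
  //   std::vector<vk::QueueFamilyProperties2> families = physicalDevice.getQueueFamilyProperties2();
  //   vk::QueueFlags flags = families[0].queueFamilyProperties.queueFlags;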
+ struct PhysicalDeviceSparseImageFormatInfo2
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSparseImageFormatInfo2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseImageFormatInfo2(VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal) VULKAN_HPP_NOEXCEPT
+ : format( format_ ), type( type_ ), samples( samples_ ), usage( usage_ ), tiling( tiling_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseImageFormatInfo2( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceSparseImageFormatInfo2( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceSparseImageFormatInfo2 & operator=( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 const *>( &rhs );
return *this;
}
- ImageViewCreateInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceSparseImageFormatInfo2 & operator=( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
- image = image_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceSparseImageFormatInfo2 ) );
return *this;
}
- ImageViewCreateInfo & setViewType( VULKAN_HPP_NAMESPACE::ImageViewType viewType_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceSparseImageFormatInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- viewType = viewType_;
+ pNext = pNext_;
return *this;
}
- ImageViewCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceSparseImageFormatInfo2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
{
format = format_;
return *this;
}
- ImageViewCreateInfo & setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping components_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceSparseImageFormatInfo2 & setType( VULKAN_HPP_NAMESPACE::ImageType type_ ) VULKAN_HPP_NOEXCEPT
{
- components = components_;
+ type = type_;
return *this;
}
- ImageViewCreateInfo & setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceSparseImageFormatInfo2 & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
{
- subresourceRange = subresourceRange_;
+ samples = samples_;
return *this;
}
- operator VkImageViewCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceSparseImageFormatInfo2 & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkImageViewCreateInfo*>( this );
+ usage = usage_;
+ return *this;
}
- operator VkImageViewCreateInfo &() VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceSparseImageFormatInfo2 & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkImageViewCreateInfo*>( this );
+ tiling = tiling_;
+ return *this;
}
- bool operator==( ImageViewCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+ operator VkPhysicalDeviceSparseImageFormatInfo2 const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( this );
+ }
+
+ operator VkPhysicalDeviceSparseImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceSparseImageFormatInfo2*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceSparseImageFormatInfo2 const& ) const = default;
+#else
+ bool operator==( PhysicalDeviceSparseImageFormatInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( flags == rhs.flags )
- && ( image == rhs.image )
- && ( viewType == rhs.viewType )
&& ( format == rhs.format )
- && ( components == rhs.components )
- && ( subresourceRange == rhs.subresourceRange );
+ && ( type == rhs.type )
+ && ( samples == rhs.samples )
+ && ( usage == rhs.usage )
+ && ( tiling == rhs.tiling );
}
- bool operator!=( ImageViewCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PhysicalDeviceSparseImageFormatInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewCreateInfo;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSparseImageFormatInfo2;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags = {};
- VULKAN_HPP_NAMESPACE::Image image = {};
- VULKAN_HPP_NAMESPACE::ImageViewType viewType = VULKAN_HPP_NAMESPACE::ImageViewType::e1D;
VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
- VULKAN_HPP_NAMESPACE::ComponentMapping components = {};
- VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {};
+ VULKAN_HPP_NAMESPACE::ImageType type = VULKAN_HPP_NAMESPACE::ImageType::e1D;
+ VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
+ VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal;
+
};
- static_assert( sizeof( ImageViewCreateInfo ) == sizeof( VkImageViewCreateInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ImageViewCreateInfo>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( PhysicalDeviceSparseImageFormatInfo2 ) == sizeof( VkPhysicalDeviceSparseImageFormatInfo2 ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceSparseImageFormatInfo2>::value, "struct wrapper is not a standard layout!" );
- struct ImageViewHandleInfoNVX
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceSparseImageFormatInfo2>
+ {
+ using Type = PhysicalDeviceSparseImageFormatInfo2;
+ };
+ using PhysicalDeviceSparseImageFormatInfo2KHR = PhysicalDeviceSparseImageFormatInfo2;
+
+ struct SparseImageFormatProperties2
{
- VULKAN_HPP_CONSTEXPR ImageViewHandleInfoNVX( VULKAN_HPP_NAMESPACE::ImageView imageView_ = {},
- VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler,
- VULKAN_HPP_NAMESPACE::Sampler sampler_ = {} ) VULKAN_HPP_NOEXCEPT
- : imageView( imageView_ )
- , descriptorType( descriptorType_ )
- , sampler( sampler_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSparseImageFormatProperties2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SparseImageFormatProperties2(VULKAN_HPP_NAMESPACE::SparseImageFormatProperties properties_ = {}) VULKAN_HPP_NOEXCEPT
+ : properties( properties_ )
{}
- VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & operator=( VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR SparseImageFormatProperties2( SparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ SparseImageFormatProperties2( VkSparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX ) - offsetof( ImageViewHandleInfoNVX, pNext ) );
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ SparseImageFormatProperties2 & operator=( VkSparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 const *>( &rhs );
return *this;
}
- ImageViewHandleInfoNVX( VkImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+ SparseImageFormatProperties2 & operator=( SparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( SparseImageFormatProperties2 ) );
+ return *this;
+ }
+
+
+ operator VkSparseImageFormatProperties2 const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkSparseImageFormatProperties2*>( this );
+ }
+
+ operator VkSparseImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkSparseImageFormatProperties2*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SparseImageFormatProperties2 const& ) const = default;
+#else
+ bool operator==( SparseImageFormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( properties == rhs.properties );
+ }
+
+ bool operator!=( SparseImageFormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSparseImageFormatProperties2;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::SparseImageFormatProperties properties = {};
+
+ };
+ static_assert( sizeof( SparseImageFormatProperties2 ) == sizeof( VkSparseImageFormatProperties2 ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<SparseImageFormatProperties2>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eSparseImageFormatProperties2>
+ {
+ using Type = SparseImageFormatProperties2;
+ };
+ using SparseImageFormatProperties2KHR = SparseImageFormatProperties2;
+
+ struct FramebufferMixedSamplesCombinationNV
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferMixedSamplesCombinationNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR FramebufferMixedSamplesCombinationNV(VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge, VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::SampleCountFlags depthStencilSamples_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags colorSamples_ = {}) VULKAN_HPP_NOEXCEPT
+ : coverageReductionMode( coverageReductionMode_ ), rasterizationSamples( rasterizationSamples_ ), depthStencilSamples( depthStencilSamples_ ), colorSamples( colorSamples_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR FramebufferMixedSamplesCombinationNV( FramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ FramebufferMixedSamplesCombinationNV( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ImageViewHandleInfoNVX& operator=( VkImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+ FramebufferMixedSamplesCombinationNV & operator=( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV const *>( &rhs );
return *this;
}
- ImageViewHandleInfoNVX & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ FramebufferMixedSamplesCombinationNV & operator=( FramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( FramebufferMixedSamplesCombinationNV ) );
return *this;
}
- ImageViewHandleInfoNVX & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
+
+ operator VkFramebufferMixedSamplesCombinationNV const&() const VULKAN_HPP_NOEXCEPT
{
- imageView = imageView_;
- return *this;
+ return *reinterpret_cast<const VkFramebufferMixedSamplesCombinationNV*>( this );
}
- ImageViewHandleInfoNVX & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
+ operator VkFramebufferMixedSamplesCombinationNV &() VULKAN_HPP_NOEXCEPT
{
- descriptorType = descriptorType_;
+ return *reinterpret_cast<VkFramebufferMixedSamplesCombinationNV*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( FramebufferMixedSamplesCombinationNV const& ) const = default;
+#else
+ bool operator==( FramebufferMixedSamplesCombinationNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( coverageReductionMode == rhs.coverageReductionMode )
+ && ( rasterizationSamples == rhs.rasterizationSamples )
+ && ( depthStencilSamples == rhs.depthStencilSamples )
+ && ( colorSamples == rhs.colorSamples );
+ }
+
+ bool operator!=( FramebufferMixedSamplesCombinationNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferMixedSamplesCombinationNV;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge;
+ VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
+ VULKAN_HPP_NAMESPACE::SampleCountFlags depthStencilSamples = {};
+ VULKAN_HPP_NAMESPACE::SampleCountFlags colorSamples = {};
+
+ };
+ static_assert( sizeof( FramebufferMixedSamplesCombinationNV ) == sizeof( VkFramebufferMixedSamplesCombinationNV ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<FramebufferMixedSamplesCombinationNV>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eFramebufferMixedSamplesCombinationNV>
+ {
+ using Type = FramebufferMixedSamplesCombinationNV;
+ };
+
+ struct SurfaceCapabilities2EXT
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilities2EXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SurfaceCapabilities2EXT(uint32_t minImageCount_ = {}, uint32_t maxImageCount_ = {}, VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = {}, uint32_t maxImageArrayLayers_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {}, VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {}, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters_ = {}) VULKAN_HPP_NOEXCEPT
+ : minImageCount( minImageCount_ ), maxImageCount( maxImageCount_ ), currentExtent( currentExtent_ ), minImageExtent( minImageExtent_ ), maxImageExtent( maxImageExtent_ ), maxImageArrayLayers( maxImageArrayLayers_ ), supportedTransforms( supportedTransforms_ ), currentTransform( currentTransform_ ), supportedCompositeAlpha( supportedCompositeAlpha_ ), supportedUsageFlags( supportedUsageFlags_ ), supportedSurfaceCounters( supportedSurfaceCounters_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR SurfaceCapabilities2EXT( SurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ SurfaceCapabilities2EXT( VkSurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ SurfaceCapabilities2EXT & operator=( VkSurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT const *>( &rhs );
return *this;
}
- ImageViewHandleInfoNVX & setSampler( VULKAN_HPP_NAMESPACE::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT
+ SurfaceCapabilities2EXT & operator=( SurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- sampler = sampler_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( SurfaceCapabilities2EXT ) );
return *this;
}
- operator VkImageViewHandleInfoNVX const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkSurfaceCapabilities2EXT const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkImageViewHandleInfoNVX*>( this );
+ return *reinterpret_cast<const VkSurfaceCapabilities2EXT*>( this );
}
- operator VkImageViewHandleInfoNVX &() VULKAN_HPP_NOEXCEPT
+ operator VkSurfaceCapabilities2EXT &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkImageViewHandleInfoNVX*>( this );
+ return *reinterpret_cast<VkSurfaceCapabilities2EXT*>( this );
}
- bool operator==( ImageViewHandleInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SurfaceCapabilities2EXT const& ) const = default;
+#else
+ bool operator==( SurfaceCapabilities2EXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( imageView == rhs.imageView )
- && ( descriptorType == rhs.descriptorType )
- && ( sampler == rhs.sampler );
+ && ( minImageCount == rhs.minImageCount )
+ && ( maxImageCount == rhs.maxImageCount )
+ && ( currentExtent == rhs.currentExtent )
+ && ( minImageExtent == rhs.minImageExtent )
+ && ( maxImageExtent == rhs.maxImageExtent )
+ && ( maxImageArrayLayers == rhs.maxImageArrayLayers )
+ && ( supportedTransforms == rhs.supportedTransforms )
+ && ( currentTransform == rhs.currentTransform )
+ && ( supportedCompositeAlpha == rhs.supportedCompositeAlpha )
+ && ( supportedUsageFlags == rhs.supportedUsageFlags )
+ && ( supportedSurfaceCounters == rhs.supportedSurfaceCounters );
}
- bool operator!=( ImageViewHandleInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( SurfaceCapabilities2EXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewHandleInfoNVX;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::ImageView imageView = {};
- VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
- VULKAN_HPP_NAMESPACE::Sampler sampler = {};
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilities2EXT;
+ void* pNext = {};
+ uint32_t minImageCount = {};
+ uint32_t maxImageCount = {};
+ VULKAN_HPP_NAMESPACE::Extent2D currentExtent = {};
+ VULKAN_HPP_NAMESPACE::Extent2D minImageExtent = {};
+ VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent = {};
+ uint32_t maxImageArrayLayers = {};
+ VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {};
+ VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
+ VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha = {};
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags = {};
+ VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters = {};
+
};
- static_assert( sizeof( ImageViewHandleInfoNVX ) == sizeof( VkImageViewHandleInfoNVX ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ImageViewHandleInfoNVX>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( SurfaceCapabilities2EXT ) == sizeof( VkSurfaceCapabilities2EXT ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<SurfaceCapabilities2EXT>::value, "struct wrapper is not a standard layout!" );
- struct ImageViewUsageCreateInfo
+ template <>
+ struct CppType<StructureType, StructureType::eSurfaceCapabilities2EXT>
{
- VULKAN_HPP_CONSTEXPR ImageViewUsageCreateInfo( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {} ) VULKAN_HPP_NOEXCEPT
- : usage( usage_ )
+ using Type = SurfaceCapabilities2EXT;
+ };
+
+ struct SurfaceCapabilitiesKHR
+ {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesKHR(uint32_t minImageCount_ = {}, uint32_t maxImageCount_ = {}, VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = {}, uint32_t maxImageArrayLayers_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {}, VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {}) VULKAN_HPP_NOEXCEPT
+ : minImageCount( minImageCount_ ), maxImageCount( maxImageCount_ ), currentExtent( currentExtent_ ), minImageExtent( minImageExtent_ ), maxImageExtent( maxImageExtent_ ), maxImageArrayLayers( maxImageArrayLayers_ ), supportedTransforms( supportedTransforms_ ), currentTransform( currentTransform_ ), supportedCompositeAlpha( supportedCompositeAlpha_ ), supportedUsageFlags( supportedUsageFlags_ )
{}
- VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo & operator=( VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesKHR( SurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ SurfaceCapabilitiesKHR( VkSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo ) - offsetof( ImageViewUsageCreateInfo, pNext ) );
- return *this;
+ *this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ImageViewUsageCreateInfo( VkImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ SurfaceCapabilitiesKHR & operator=( VkSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR const *>( &rhs );
+ return *this;
}
- ImageViewUsageCreateInfo& operator=( VkImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ SurfaceCapabilitiesKHR & operator=( SurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( SurfaceCapabilitiesKHR ) );
return *this;
}
- ImageViewUsageCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+
+ operator VkSurfaceCapabilitiesKHR const&() const VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ return *reinterpret_cast<const VkSurfaceCapabilitiesKHR*>( this );
+ }
+
+ operator VkSurfaceCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkSurfaceCapabilitiesKHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SurfaceCapabilitiesKHR const& ) const = default;
+#else
+ bool operator==( SurfaceCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( minImageCount == rhs.minImageCount )
+ && ( maxImageCount == rhs.maxImageCount )
+ && ( currentExtent == rhs.currentExtent )
+ && ( minImageExtent == rhs.minImageExtent )
+ && ( maxImageExtent == rhs.maxImageExtent )
+ && ( maxImageArrayLayers == rhs.maxImageArrayLayers )
+ && ( supportedTransforms == rhs.supportedTransforms )
+ && ( currentTransform == rhs.currentTransform )
+ && ( supportedCompositeAlpha == rhs.supportedCompositeAlpha )
+ && ( supportedUsageFlags == rhs.supportedUsageFlags );
+ }
+
+ bool operator!=( SurfaceCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ uint32_t minImageCount = {};
+ uint32_t maxImageCount = {};
+ VULKAN_HPP_NAMESPACE::Extent2D currentExtent = {};
+ VULKAN_HPP_NAMESPACE::Extent2D minImageExtent = {};
+ VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent = {};
+ uint32_t maxImageArrayLayers = {};
+ VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {};
+ VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
+ VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha = {};
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags = {};
+
+ };
+ static_assert( sizeof( SurfaceCapabilitiesKHR ) == sizeof( VkSurfaceCapabilitiesKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<SurfaceCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+
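A short sketch of how the plain SurfaceCapabilitiesKHR wrapper above is typically consumed when sizing a swapchain; physicalDevice and surface are placeholders for handles created elsewhere:

    #include <algorithm>
    #include <cstdint>
    #include <vulkan/vulkan.hpp>

    // Sketch: clamp the requested image count and extent to what the surface reports.
    vk::Extent2D chooseSwapchainExtent( vk::PhysicalDevice physicalDevice, vk::SurfaceKHR surface,
                                        uint32_t & imageCount, vk::Extent2D desired )
    {
      vk::SurfaceCapabilitiesKHR caps = physicalDevice.getSurfaceCapabilitiesKHR( surface );

      imageCount = std::max( imageCount, caps.minImageCount );
      if ( caps.maxImageCount != 0 )   // 0 means "no upper limit"
        imageCount = std::min( imageCount, caps.maxImageCount );

      // currentExtent of {0xFFFFFFFF, 0xFFFFFFFF} means the surface size follows the swapchain.
      if ( caps.currentExtent.width != 0xFFFFFFFF )
        return caps.currentExtent;

      desired.width  = std::clamp( desired.width,  caps.minImageExtent.width,  caps.maxImageExtent.width );
      desired.height = std::clamp( desired.height, caps.minImageExtent.height, caps.maxImageExtent.height );
      return desired;
    }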
+ struct SurfaceCapabilities2KHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilities2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SurfaceCapabilities2KHR(VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities_ = {}) VULKAN_HPP_NOEXCEPT
+ : surfaceCapabilities( surfaceCapabilities_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR SurfaceCapabilities2KHR( SurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ SurfaceCapabilities2KHR( VkSurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ SurfaceCapabilities2KHR & operator=( VkSurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR const *>( &rhs );
return *this;
}
- ImageViewUsageCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
+ SurfaceCapabilities2KHR & operator=( SurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- usage = usage_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( SurfaceCapabilities2KHR ) );
return *this;
}
- operator VkImageViewUsageCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkSurfaceCapabilities2KHR const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkImageViewUsageCreateInfo*>( this );
+ return *reinterpret_cast<const VkSurfaceCapabilities2KHR*>( this );
}
- operator VkImageViewUsageCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkSurfaceCapabilities2KHR &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkImageViewUsageCreateInfo*>( this );
+ return *reinterpret_cast<VkSurfaceCapabilities2KHR*>( this );
}
- bool operator==( ImageViewUsageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SurfaceCapabilities2KHR const& ) const = default;
+#else
+ bool operator==( SurfaceCapabilities2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( usage == rhs.usage );
+ && ( surfaceCapabilities == rhs.surfaceCapabilities );
}
- bool operator!=( ImageViewUsageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( SurfaceCapabilities2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewUsageCreateInfo;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilities2KHR;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities = {};
+
};
- static_assert( sizeof( ImageViewUsageCreateInfo ) == sizeof( VkImageViewUsageCreateInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ImageViewUsageCreateInfo>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( SurfaceCapabilities2KHR ) == sizeof( VkSurfaceCapabilities2KHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<SurfaceCapabilities2KHR>::value, "struct wrapper is not a standard layout!" );
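The 2KHR variant wraps the same data behind an sType/pNext header and is queried through VK_KHR_get_surface_capabilities2; a hedged sketch (handles and use of the default dispatcher are assumptions):

    #include <vulkan/vulkan.hpp>

    // Sketch: query via the C++ wrapper around vkGetPhysicalDeviceSurfaceCapabilities2KHR.
    vk::SurfaceCapabilitiesKHR queryCaps2( vk::PhysicalDevice physicalDevice, vk::SurfaceKHR surface )
    {
      vk::PhysicalDeviceSurfaceInfo2KHR surfaceInfo( surface );
      vk::SurfaceCapabilities2KHR caps2 = physicalDevice.getSurfaceCapabilities2KHR( surfaceInfo );
      return caps2.surfaceCapabilities;   // the nested core structure
    }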
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
+ template <>
+ struct CppType<StructureType, StructureType::eSurfaceCapabilities2KHR>
+ {
+ using Type = SurfaceCapabilities2KHR;
+ };
- struct ImportAndroidHardwareBufferInfoANDROID
+ struct SurfaceFormatKHR
{
- VULKAN_HPP_CONSTEXPR ImportAndroidHardwareBufferInfoANDROID( struct AHardwareBuffer* buffer_ = {} ) VULKAN_HPP_NOEXCEPT
- : buffer( buffer_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SurfaceFormatKHR(VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace_ = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear) VULKAN_HPP_NOEXCEPT
+ : format( format_ ), colorSpace( colorSpace_ )
{}
- VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID & operator=( VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR SurfaceFormatKHR( SurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ SurfaceFormatKHR( VkSurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID ) - offsetof( ImportAndroidHardwareBufferInfoANDROID, pNext ) );
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ SurfaceFormatKHR & operator=( VkSurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR const *>( &rhs );
return *this;
}
- ImportAndroidHardwareBufferInfoANDROID( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+ SurfaceFormatKHR & operator=( SurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( SurfaceFormatKHR ) );
+ return *this;
+ }
+
+
+ operator VkSurfaceFormatKHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkSurfaceFormatKHR*>( this );
+ }
+
+ operator VkSurfaceFormatKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkSurfaceFormatKHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SurfaceFormatKHR const& ) const = default;
+#else
+ bool operator==( SurfaceFormatKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( format == rhs.format )
+ && ( colorSpace == rhs.colorSpace );
+ }
+
+ bool operator!=( SurfaceFormatKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear;
+
+ };
+ static_assert( sizeof( SurfaceFormatKHR ) == sizeof( VkSurfaceFormatKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<SurfaceFormatKHR>::value, "struct wrapper is not a standard layout!" );
+
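A typical consumer of SurfaceFormatKHR picks a preferred format/colour-space pair from the enumerated list; the preference below (B8G8R8A8 sRGB) is only an example, and the handles are assumed to exist:

    #include <vector>
    #include <vulkan/vulkan.hpp>

    // Sketch: prefer an sRGB BGRA format, otherwise fall back to the first reported entry.
    vk::SurfaceFormatKHR chooseSurfaceFormat( vk::PhysicalDevice physicalDevice, vk::SurfaceKHR surface )
    {
      std::vector<vk::SurfaceFormatKHR> formats = physicalDevice.getSurfaceFormatsKHR( surface );
      for ( auto const & f : formats )
      {
        if ( ( f.format == vk::Format::eB8G8R8A8Srgb ) && ( f.colorSpace == vk::ColorSpaceKHR::eSrgbNonlinear ) )
          return f;
      }
      return formats.front();   // assumes a non-empty list, which a supported surface provides
    }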
+ struct SurfaceFormat2KHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceFormat2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SurfaceFormat2KHR(VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat_ = {}) VULKAN_HPP_NOEXCEPT
+ : surfaceFormat( surfaceFormat_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR SurfaceFormat2KHR( SurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ SurfaceFormat2KHR( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ImportAndroidHardwareBufferInfoANDROID& operator=( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+ SurfaceFormat2KHR & operator=( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR const *>( &rhs );
return *this;
}
- ImportAndroidHardwareBufferInfoANDROID & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ SurfaceFormat2KHR & operator=( SurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( SurfaceFormat2KHR ) );
return *this;
}
- ImportAndroidHardwareBufferInfoANDROID & setBuffer( struct AHardwareBuffer* buffer_ ) VULKAN_HPP_NOEXCEPT
+
+ operator VkSurfaceFormat2KHR const&() const VULKAN_HPP_NOEXCEPT
{
- buffer = buffer_;
+ return *reinterpret_cast<const VkSurfaceFormat2KHR*>( this );
+ }
+
+ operator VkSurfaceFormat2KHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkSurfaceFormat2KHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SurfaceFormat2KHR const& ) const = default;
+#else
+ bool operator==( SurfaceFormat2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( surfaceFormat == rhs.surfaceFormat );
+ }
+
+ bool operator!=( SurfaceFormat2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFormat2KHR;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat = {};
+
+ };
+ static_assert( sizeof( SurfaceFormat2KHR ) == sizeof( VkSurfaceFormat2KHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<SurfaceFormat2KHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eSurfaceFormat2KHR>
+ {
+ using Type = SurfaceFormat2KHR;
+ };
+
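SurfaceFormat2KHR carries the same format/colour-space pair inside an sType/pNext wrapper so per-format extension data can be chained; a sketch of the 2KHR enumeration path (handle names are placeholders):

    #include <vector>
    #include <vulkan/vulkan.hpp>

    // Sketch: same enumeration as getSurfaceFormatsKHR, but each element is a SurfaceFormat2KHR
    // whose pNext could receive extension structures.
    void listSurfaceFormats2( vk::PhysicalDevice physicalDevice, vk::SurfaceKHR surface )
    {
      vk::PhysicalDeviceSurfaceInfo2KHR surfaceInfo( surface );
      std::vector<vk::SurfaceFormat2KHR> formats = physicalDevice.getSurfaceFormats2KHR( surfaceInfo );
      for ( auto const & f : formats )
      {
        vk::SurfaceFormatKHR core = f.surfaceFormat;   // nested core pair: format + colorSpace
        (void)core;
      }
    }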
+ struct PhysicalDeviceToolPropertiesEXT
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceToolPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceToolPropertiesEXT(std::array<char,VK_MAX_EXTENSION_NAME_SIZE> const& name_ = {}, std::array<char,VK_MAX_EXTENSION_NAME_SIZE> const& version_ = {}, VULKAN_HPP_NAMESPACE::ToolPurposeFlagsEXT purposes_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const& description_ = {}, std::array<char,VK_MAX_EXTENSION_NAME_SIZE> const& layer_ = {}) VULKAN_HPP_NOEXCEPT
+ : name( name_ ), version( version_ ), purposes( purposes_ ), description( description_ ), layer( layer_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceToolPropertiesEXT( PhysicalDeviceToolPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceToolPropertiesEXT( VkPhysicalDeviceToolPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceToolPropertiesEXT & operator=( VkPhysicalDeviceToolPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT const *>( &rhs );
return *this;
}
- operator VkImportAndroidHardwareBufferInfoANDROID const&() const VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceToolPropertiesEXT & operator=( PhysicalDeviceToolPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkImportAndroidHardwareBufferInfoANDROID*>( this );
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceToolPropertiesEXT ) );
+ return *this;
}
- operator VkImportAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT
+
+ operator VkPhysicalDeviceToolPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkImportAndroidHardwareBufferInfoANDROID*>( this );
+ return *reinterpret_cast<const VkPhysicalDeviceToolPropertiesEXT*>( this );
}
- bool operator==( ImportAndroidHardwareBufferInfoANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceToolPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceToolPropertiesEXT*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceToolPropertiesEXT const& ) const = default;
+#else
+ bool operator==( PhysicalDeviceToolPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( buffer == rhs.buffer );
+ && ( name == rhs.name )
+ && ( version == rhs.version )
+ && ( purposes == rhs.purposes )
+ && ( description == rhs.description )
+ && ( layer == rhs.layer );
}
- bool operator!=( ImportAndroidHardwareBufferInfoANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PhysicalDeviceToolPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportAndroidHardwareBufferInfoANDROID;
- const void* pNext = {};
- struct AHardwareBuffer* buffer = {};
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceToolPropertiesEXT;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> name = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> version = {};
+ VULKAN_HPP_NAMESPACE::ToolPurposeFlagsEXT purposes = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> layer = {};
+
};
- static_assert( sizeof( ImportAndroidHardwareBufferInfoANDROID ) == sizeof( VkImportAndroidHardwareBufferInfoANDROID ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ImportAndroidHardwareBufferInfoANDROID>::value, "struct wrapper is not a standard layout!" );
-#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+ static_assert( sizeof( PhysicalDeviceToolPropertiesEXT ) == sizeof( VkPhysicalDeviceToolPropertiesEXT ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceToolPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
- struct ImportFenceFdInfoKHR
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceToolPropertiesEXT>
{
- VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = {},
- VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ = {},
- VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd,
- int fd_ = {} ) VULKAN_HPP_NOEXCEPT
- : fence( fence_ )
- , flags( flags_ )
- , handleType( handleType_ )
- , fd( fd_ )
+ using Type = PhysicalDeviceToolPropertiesEXT;
+ };
+
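PhysicalDeviceToolPropertiesEXT is filled by the VK_EXT_tooling_info query; a small sketch that lists active tools (validation layers, profilers, and the like), assuming the extension is supported and that this header exposes PhysicalDevice::getToolPropertiesEXT as in current Vulkan-Hpp:

    #include <cstdio>
    #include <vector>
    #include <vulkan/vulkan.hpp>

    // Sketch: enumerate tools attached to the device and print their names and versions.
    void listActiveTools( vk::PhysicalDevice physicalDevice )
    {
      std::vector<vk::PhysicalDeviceToolPropertiesEXT> tools = physicalDevice.getToolPropertiesEXT();
      for ( auto const & tool : tools )
      {
        // name/version/layer are fixed-size char arrays wrapped in ArrayWrapper1D.
        std::printf( "%s %s\n", tool.name.data(), tool.version.data() );
      }
    }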
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch> class UniqueHandleTraits<Device, Dispatch> { public: using deleter = ObjectDestroy<NoParent, Dispatch>; };
+ using UniqueDevice = UniqueHandle<Device, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+
+ class PhysicalDevice
+ {
+ public:
+ using CType = VkPhysicalDevice;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePhysicalDevice;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePhysicalDevice;
+
+ public:
+ VULKAN_HPP_CONSTEXPR PhysicalDevice() VULKAN_HPP_NOEXCEPT
+ : m_physicalDevice(VK_NULL_HANDLE)
{}
- VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR & operator=( VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR PhysicalDevice( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_physicalDevice(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT PhysicalDevice( VkPhysicalDevice physicalDevice ) VULKAN_HPP_NOEXCEPT
+ : m_physicalDevice( physicalDevice )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ PhysicalDevice & operator=(VkPhysicalDevice physicalDevice) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR ) - offsetof( ImportFenceFdInfoKHR, pNext ) );
+ m_physicalDevice = physicalDevice;
return *this;
}
+#endif
- ImportFenceFdInfoKHR( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDevice & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ {
+ m_physicalDevice = VK_NULL_HANDLE;
+ return *this;
+ }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDevice const& ) const = default;
+#else
+ bool operator==( PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_physicalDevice == rhs.m_physicalDevice;
+ }
+
+ bool operator!=(PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_physicalDevice != rhs.m_physicalDevice;
+ }
+
+ bool operator<(PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_physicalDevice < rhs.m_physicalDevice;
+ }
+#endif
+
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result acquireXlibDisplayEXT( Display* dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type acquireXlibDisplayEXT( Display & dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Device* pDevice, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Device>::type createDevice( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<Device,Dispatch>>::type createDeviceUnique( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
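    // Usage sketch (illustrative annotation, not part of the generated header): with the
    // createDevice declarations above, a logical device is typically created from a chosen
    // queue family, e.g.
    //   float priority = 1.0f;
    //   vk::DeviceQueueCreateInfo queueInfo( {}, queueFamilyIndex, 1, &priority );
    //   vk::DeviceCreateInfo createInfo( {}, 1, &queueInfo );
    //   vk::UniqueDevice device = physicalDevice.createDeviceUnique( createInfo );
    // queueFamilyIndex and physicalDevice are assumed to come from earlier enumeration.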
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DisplayModeKHR* pMode, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayModeKHR>::type createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayModeCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result enumerateDeviceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator = std::allocator<ExtensionProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateDeviceExtensionProperties( Optional<const std::string> layerName VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Allocator = std::allocator<ExtensionProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, ExtensionProperties>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateDeviceExtensionProperties( Optional<const std::string> layerName, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result enumerateDeviceLayerProperties( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator = std::allocator<LayerProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateDeviceLayerProperties(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Allocator = std::allocator<LayerProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, LayerProperties>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateDeviceLayerProperties(Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, uint32_t* pCounterCount, VULKAN_HPP_NAMESPACE::PerformanceCounterKHR* pCounters, VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR* pCounterDescriptions, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator = std::allocator<PerformanceCounterDescriptionKHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PerformanceCounterDescriptionKHR,Allocator>>::type enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, ArrayProxy<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR> const &counters, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename Allocator = std::allocator<PerformanceCounterDescriptionKHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B = Allocator,
+ typename std::enable_if<std::is_same<typename B::value_type, PerformanceCounterDescriptionKHR>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PerformanceCounterDescriptionKHR,Allocator>>::type enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, ArrayProxy<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR> const &counters, Allocator const& vectorAllocator, Dispatch const &d ) const;
+ template <typename PerformanceCounterKHRAllocator = std::allocator<PerformanceCounterKHR>, typename PerformanceCounterDescriptionKHRAllocator = std::allocator<PerformanceCounterDescriptionKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::pair<std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator>, std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename PerformanceCounterKHRAllocator = std::allocator<PerformanceCounterKHR>, typename PerformanceCounterDescriptionKHRAllocator = std::allocator<PerformanceCounterDescriptionKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = PerformanceCounterKHRAllocator, typename B2 = PerformanceCounterDescriptionKHRAllocator, typename std::enable_if < std::is_same<typename B1::value_type, PerformanceCounterKHR>::value && std::is_same<typename B2::value_type, PerformanceCounterDescriptionKHR>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::pair<std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator>, std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, PerformanceCounterKHRAllocator & countersAllocator, PerformanceCounterDescriptionKHRAllocator & counterDescriptionsAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator = std::allocator<DisplayModeProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DisplayModeProperties2KHR,Allocator>>::type getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Allocator = std::allocator<DisplayModeProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, DisplayModeProperties2KHR>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DisplayModeProperties2KHR,Allocator>>::type getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator = std::allocator<DisplayModePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DisplayModePropertiesKHR,Allocator>>::type getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Allocator = std::allocator<DisplayModePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, DisplayModePropertiesKHR>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DisplayModePropertiesKHR,Allocator>>::type getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR* pDisplayPlaneInfo, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR* pCapabilities, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR>::type getDisplayPlaneCapabilities2KHR( const DisplayPlaneInfo2KHR & displayPlaneInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR* pCapabilities, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR>::type getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, uint32_t* pDisplayCount, VULKAN_HPP_NAMESPACE::DisplayKHR* pDisplays, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator = std::allocator<DisplayKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DisplayKHR,Allocator>>::type getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Allocator = std::allocator<DisplayKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, DisplayKHR>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DisplayKHR,Allocator>>::type getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getCalibrateableTimeDomainsEXT( uint32_t* pTimeDomainCount, VULKAN_HPP_NAMESPACE::TimeDomainEXT* pTimeDomains, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator = std::allocator<TimeDomainEXT>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<TimeDomainEXT,Allocator>>::type getCalibrateableTimeDomainsEXT(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Allocator = std::allocator<TimeDomainEXT>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, TimeDomainEXT>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<TimeDomainEXT,Allocator>>::type getCalibrateableTimeDomainsEXT(Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getCooperativeMatrixPropertiesNV( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator = std::allocator<CooperativeMatrixPropertiesNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<CooperativeMatrixPropertiesNV,Allocator>>::type getCooperativeMatrixPropertiesNV(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Allocator = std::allocator<CooperativeMatrixPropertiesNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, CooperativeMatrixPropertiesNV>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<CooperativeMatrixPropertiesNV,Allocator>>::type getCooperativeMatrixPropertiesNV(Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_DIRECTFB_EXT
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ Bool32 getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, IDirectFB* dfb, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ Bool32 getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, IDirectFB & dfb, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getDisplayPlaneProperties2KHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator = std::allocator<DisplayPlaneProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DisplayPlaneProperties2KHR,Allocator>>::type getDisplayPlaneProperties2KHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Allocator = std::allocator<DisplayPlaneProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, DisplayPlaneProperties2KHR>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DisplayPlaneProperties2KHR,Allocator>>::type getDisplayPlaneProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getDisplayPlanePropertiesKHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator = std::allocator<DisplayPlanePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DisplayPlanePropertiesKHR,Allocator>>::type getDisplayPlanePropertiesKHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Allocator = std::allocator<DisplayPlanePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, DisplayPlanePropertiesKHR>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DisplayPlanePropertiesKHR,Allocator>>::type getDisplayPlanePropertiesKHR(Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getDisplayProperties2KHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayProperties2KHR* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator = std::allocator<DisplayProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DisplayProperties2KHR,Allocator>>::type getDisplayProperties2KHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Allocator = std::allocator<DisplayProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, DisplayProperties2KHR>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DisplayProperties2KHR,Allocator>>::type getDisplayProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getDisplayPropertiesKHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator = std::allocator<DisplayPropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DisplayPropertiesKHR,Allocator>>::type getDisplayPropertiesKHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Allocator = std::allocator<DisplayPropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, DisplayPropertiesKHR>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DisplayPropertiesKHR,Allocator>>::type getDisplayPropertiesKHR(Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VULKAN_HPP_NAMESPACE::ExternalBufferProperties* pExternalBufferProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferProperties( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VULKAN_HPP_NAMESPACE::ExternalBufferProperties* pExternalBufferProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferPropertiesKHR( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VULKAN_HPP_NAMESPACE::ExternalFenceProperties* pExternalFenceProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFenceProperties( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VULKAN_HPP_NAMESPACE::ExternalFenceProperties* pExternalFenceProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFencePropertiesKHR( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV* pExternalImageFormatProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV>::type getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphoreProperties( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphorePropertiesKHR( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pFeatures, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures getFeatures(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2* pFeatures, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 getFeatures2(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ StructureChain<X, Y, Z...> getFeatures2(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
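    // Usage sketch (illustrative annotation only): the getFeatures2 overload returning a
    // StructureChain queries extension feature structs in one call, e.g.
    //   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
    //                                            vk::PhysicalDeviceVulkan12Features>();
    //   vk::Bool32 timeline = chain.get<vk::PhysicalDeviceVulkan12Features>().timelineSemaphore;
    // The sType/pNext wiring between the chained structs is handled by StructureChain.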
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getFeatures2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2* pFeatures, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 getFeatures2KHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ StructureChain<X, Y, Z...> getFeatures2KHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties* pFormatProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NAMESPACE::FormatProperties getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties2* pFormatProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NAMESPACE::FormatProperties2 getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ StructureChain<X, Y, Z...> getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties2* pFormatProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NAMESPACE::FormatProperties2 getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ StructureChain<X, Y, Z...> getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ImageFormatProperties* pImageFormatProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties>::type getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2* pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2* pImageFormatProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<StructureChain<X, Y, Z...>>::type getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2* pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2* pImageFormatProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<StructureChain<X, Y, Z...>>::type getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties* pMemoryProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties getMemoryProperties(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 getMemoryProperties2(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ StructureChain<X, Y, Z...> getMemoryProperties2(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 getMemoryProperties2KHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ StructureChain<X, Y, Z...> getMemoryProperties2KHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT* pMultisampleProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pRectCount, VULKAN_HPP_NAMESPACE::Rect2D* pRects, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator = std::allocator<Rect2D>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<Rect2D,Allocator>>::type getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Allocator = std::allocator<Rect2D>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, Rect2D>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<Rect2D,Allocator>>::type getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties getProperties(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 getProperties2(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ StructureChain<X, Y, Z...> getProperties2(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 getProperties2KHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ StructureChain<X, Y, Z...> getProperties2KHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getQueueFamilyPerformanceQueryPassesKHR( const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, uint32_t* pNumPasses, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ uint32_t getQueueFamilyPerformanceQueryPassesKHR( const QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getQueueFamilyProperties( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties* pQueueFamilyProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator = std::allocator<QueueFamilyProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ std::vector<QueueFamilyProperties,Allocator> getQueueFamilyProperties(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Allocator = std::allocator<QueueFamilyProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, QueueFamilyProperties>::value, int>::type = 0>
+ std::vector<QueueFamilyProperties,Allocator> getQueueFamilyProperties(Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getQueueFamilyProperties2( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator = std::allocator<QueueFamilyProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ std::vector<QueueFamilyProperties2,Allocator> getQueueFamilyProperties2(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Allocator = std::allocator<QueueFamilyProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, QueueFamilyProperties2>::value, int>::type = 0>
+ std::vector<QueueFamilyProperties2,Allocator> getQueueFamilyProperties2(Allocator const& vectorAllocator, Dispatch const &d ) const;
+ template<typename StructureChain, typename Allocator = std::allocator<StructureChain>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ std::vector<StructureChain,Allocator> getQueueFamilyProperties2(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename StructureChain, typename Allocator = std::allocator<StructureChain>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, StructureChain>::value, int>::type = 0>
+ std::vector<StructureChain,Allocator> getQueueFamilyProperties2(Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getQueueFamilyProperties2KHR( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator = std::allocator<QueueFamilyProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ std::vector<QueueFamilyProperties2,Allocator> getQueueFamilyProperties2KHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Allocator = std::allocator<QueueFamilyProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, QueueFamilyProperties2>::value, int>::type = 0>
+ std::vector<QueueFamilyProperties2,Allocator> getQueueFamilyProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const;
+ template<typename StructureChain, typename Allocator = std::allocator<StructureChain>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ std::vector<StructureChain,Allocator> getQueueFamilyProperties2KHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename StructureChain, typename Allocator = std::allocator<StructureChain>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, StructureChain>::value, int>::type = 0>
+ std::vector<StructureChain,Allocator> getQueueFamilyProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator = std::allocator<SparseImageFormatProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ std::vector<SparseImageFormatProperties,Allocator> getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Allocator = std::allocator<SparseImageFormatProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, SparseImageFormatProperties>::value, int>::type = 0>
+ std::vector<SparseImageFormatProperties,Allocator> getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator = std::allocator<SparseImageFormatProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ std::vector<SparseImageFormatProperties2,Allocator> getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Allocator = std::allocator<SparseImageFormatProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, SparseImageFormatProperties2>::value, int>::type = 0>
+ std::vector<SparseImageFormatProperties2,Allocator> getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator = std::allocator<SparseImageFormatProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ std::vector<SparseImageFormatProperties2,Allocator> getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Allocator = std::allocator<SparseImageFormatProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, SparseImageFormatProperties2>::value, int>::type = 0>
+ std::vector<SparseImageFormatProperties2,Allocator> getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getSupportedFramebufferMixedSamplesCombinationsNV( uint32_t* pCombinationCount, VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV* pCombinations, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator = std::allocator<FramebufferMixedSamplesCombinationNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<FramebufferMixedSamplesCombinationNV,Allocator>>::type getSupportedFramebufferMixedSamplesCombinationsNV(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Allocator = std::allocator<FramebufferMixedSamplesCombinationNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, FramebufferMixedSamplesCombinationNV>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<FramebufferMixedSamplesCombinationNV,Allocator>>::type getSupportedFramebufferMixedSamplesCombinationsNV(Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT* pSurfaceCapabilities, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT>::type getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR* pSurfaceCapabilities, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>::type getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<StructureChain<X, Y, Z...>>::type getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR* pSurfaceCapabilities, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR>::type getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR* pSurfaceFormats, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator = std::allocator<SurfaceFormat2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<SurfaceFormat2KHR,Allocator>>::type getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Allocator = std::allocator<SurfaceFormat2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, SurfaceFormat2KHR>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<SurfaceFormat2KHR,Allocator>>::type getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormatKHR* pSurfaceFormats, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator = std::allocator<SurfaceFormatKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<SurfaceFormatKHR,Allocator>>::type getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Allocator = std::allocator<SurfaceFormatKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, SurfaceFormatKHR>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<SurfaceFormatKHR,Allocator>>::type getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR* pPresentModes, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator = std::allocator<PresentModeKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Allocator = std::allocator<PresentModeKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, PresentModeKHR>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR* pPresentModes, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator = std::allocator<PresentModeKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Allocator = std::allocator<PresentModeKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, PresentModeKHR>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::Bool32* pSupported, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Bool32>::type getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
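
Minimal usage sketch for the surface-query members declared above (getSurfaceCapabilitiesKHR, getSurfaceFormatsKHR, getSurfacePresentModesKHR, getSurfaceSupportKHR), assuming an existing physical device and surface, the default dispatcher, and exceptions enabled. The SwapchainSupport struct and the helper names are invented for the example.

    #include <vulkan/vulkan.hpp>
    #include <vector>

    struct SwapchainSupport
    {
      vk::SurfaceCapabilitiesKHR        capabilities;
      std::vector<vk::SurfaceFormatKHR> formats;
      std::vector<vk::PresentModeKHR>   presentModes;
    };

    SwapchainSupport querySwapchainSupport( vk::PhysicalDevice physicalDevice, vk::SurfaceKHR surface )
    {
      SwapchainSupport support;
      // Enhanced-mode overloads return the values directly and throw vk::SystemError on failure.
      support.capabilities = physicalDevice.getSurfaceCapabilitiesKHR( surface );
      support.formats      = physicalDevice.getSurfaceFormatsKHR( surface );
      support.presentModes = physicalDevice.getSurfacePresentModesKHR( surface );
      return support;
    }

    bool queueFamilyCanPresent( vk::PhysicalDevice physicalDevice, uint32_t queueFamilyIndex, vk::SurfaceKHR surface )
    {
      // getSurfaceSupportKHR returns vk::Bool32 in enhanced mode.
      return physicalDevice.getSurfaceSupportKHR( queueFamilyIndex, surface ) == VK_TRUE;
    }
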
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getToolPropertiesEXT( uint32_t* pToolCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT* pToolProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator = std::allocator<PhysicalDeviceToolPropertiesEXT>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PhysicalDeviceToolPropertiesEXT,Allocator>>::type getToolPropertiesEXT(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Allocator = std::allocator<PhysicalDeviceToolPropertiesEXT>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceToolPropertiesEXT>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PhysicalDeviceToolPropertiesEXT,Allocator>>::type getToolPropertiesEXT(Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display* display, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ Bool32 getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ Bool32 getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t visual_id, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display* dpy, VisualID visualID, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ Result getRandROutputDisplayEXT( Display* dpy, RROutput rrOutput, VULKAN_HPP_NAMESPACE::DisplayKHR* pDisplay, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ Result releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ typename ResultValueType<void>::type releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPhysicalDevice() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_physicalDevice;
+ }
+
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_physicalDevice != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_physicalDevice == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkPhysicalDevice m_physicalDevice;
+ };
+ static_assert( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice ) == sizeof( VkPhysicalDevice ), "handle and wrapper have different size!" );
+
+ template <>
+ struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::ePhysicalDevice>
+ {
+ using type = VULKAN_HPP_NAMESPACE::PhysicalDevice;
+ };
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::ePhysicalDevice>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::PhysicalDevice;
+ };
+
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePhysicalDevice>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::PhysicalDevice;
+ };
+
+
+ template <>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::PhysicalDevice>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+ };
+
+ struct DeviceGroupDeviceCreateInfo
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupDeviceCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DeviceGroupDeviceCreateInfo(uint32_t physicalDeviceCount_ = {}, const VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices_ = {}) VULKAN_HPP_NOEXCEPT
+ : physicalDeviceCount( physicalDeviceCount_ ), pPhysicalDevices( pPhysicalDevices_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR DeviceGroupDeviceCreateInfo( DeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DeviceGroupDeviceCreateInfo( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- ImportFenceFdInfoKHR& operator=( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ DeviceGroupDeviceCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PhysicalDevice> const & physicalDevices_ )
+ : physicalDeviceCount( static_cast<uint32_t>( physicalDevices_.size() ) ), pPhysicalDevices( physicalDevices_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ DeviceGroupDeviceCreateInfo & operator=( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo const *>( &rhs );
return *this;
}
- ImportFenceFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ DeviceGroupDeviceCreateInfo & operator=( DeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DeviceGroupDeviceCreateInfo ) );
return *this;
}
- ImportFenceFdInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
+ DeviceGroupDeviceCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- fence = fence_;
+ pNext = pNext_;
return *this;
}
- ImportFenceFdInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ DeviceGroupDeviceCreateInfo & setPhysicalDeviceCount( uint32_t physicalDeviceCount_ ) VULKAN_HPP_NOEXCEPT
{
- flags = flags_;
+ physicalDeviceCount = physicalDeviceCount_;
return *this;
}
- ImportFenceFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ DeviceGroupDeviceCreateInfo & setPPhysicalDevices( const VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices_ ) VULKAN_HPP_NOEXCEPT
{
- handleType = handleType_;
+ pPhysicalDevices = pPhysicalDevices_;
return *this;
}
- ImportFenceFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ DeviceGroupDeviceCreateInfo & setPhysicalDevices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PhysicalDevice> const & physicalDevices_ ) VULKAN_HPP_NOEXCEPT
{
- fd = fd_;
+ physicalDeviceCount = static_cast<uint32_t>( physicalDevices_.size() );
+ pPhysicalDevices = physicalDevices_.data();
return *this;
}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
- operator VkImportFenceFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkDeviceGroupDeviceCreateInfo const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkImportFenceFdInfoKHR*>( this );
+ return *reinterpret_cast<const VkDeviceGroupDeviceCreateInfo*>( this );
}
- operator VkImportFenceFdInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkDeviceGroupDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkImportFenceFdInfoKHR*>( this );
+ return *reinterpret_cast<VkDeviceGroupDeviceCreateInfo*>( this );
}
- bool operator==( ImportFenceFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DeviceGroupDeviceCreateInfo const& ) const = default;
+#else
+ bool operator==( DeviceGroupDeviceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( fence == rhs.fence )
- && ( flags == rhs.flags )
- && ( handleType == rhs.handleType )
- && ( fd == rhs.fd );
+ && ( physicalDeviceCount == rhs.physicalDeviceCount )
+ && ( pPhysicalDevices == rhs.pPhysicalDevices );
}
- bool operator!=( ImportFenceFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( DeviceGroupDeviceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportFenceFdInfoKHR;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupDeviceCreateInfo;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::Fence fence = {};
- VULKAN_HPP_NAMESPACE::FenceImportFlags flags = {};
- VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
- int fd = {};
+ uint32_t physicalDeviceCount = {};
+ const VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices = {};
+
};
- static_assert( sizeof( ImportFenceFdInfoKHR ) == sizeof( VkImportFenceFdInfoKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ImportFenceFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( DeviceGroupDeviceCreateInfo ) == sizeof( VkDeviceGroupDeviceCreateInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<DeviceGroupDeviceCreateInfo>::value, "struct wrapper is not a standard layout!" );
-#ifdef VK_USE_PLATFORM_WIN32_KHR
+ template <>
+ struct CppType<StructureType, StructureType::eDeviceGroupDeviceCreateInfo>
+ {
+ using Type = DeviceGroupDeviceCreateInfo;
+ };
+ using DeviceGroupDeviceCreateInfoKHR = DeviceGroupDeviceCreateInfo;
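
Sketch (not taken from the header) of chaining DeviceGroupDeviceCreateInfo into device creation. It assumes the caller already has the physical devices of one group, a filled vk::DeviceQueueCreateInfo, and the smart-handle helpers enabled (the default); the function name is illustrative.

    #include <vulkan/vulkan.hpp>
    #include <vector>

    vk::UniqueDevice createDeviceForGroup( std::vector<vk::PhysicalDevice> const & groupDevices,
                                           vk::DeviceQueueCreateInfo const &       queueCreateInfo )
    {
      // The ArrayProxyNoTemporaries constructor fills physicalDeviceCount / pPhysicalDevices.
      vk::DeviceGroupDeviceCreateInfo groupInfo( groupDevices );

      vk::DeviceCreateInfo deviceCreateInfo;
      deviceCreateInfo.setQueueCreateInfoCount( 1 )
                      .setPQueueCreateInfos( &queueCreateInfo )
                      .setPNext( &groupInfo );

      // The logical device is created on the first device and spans the whole group.
      return groupDevices.front().createDeviceUnique( deviceCreateInfo );
    }
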
- struct ImportFenceWin32HandleInfoKHR
+ struct DeviceGroupPresentInfoKHR
{
- VULKAN_HPP_CONSTEXPR ImportFenceWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = {},
- VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ = {},
- VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd,
- HANDLE handle_ = {},
- LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT
- : fence( fence_ )
- , flags( flags_ )
- , handleType( handleType_ )
- , handle( handle_ )
- , name( name_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupPresentInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DeviceGroupPresentInfoKHR(uint32_t swapchainCount_ = {}, const uint32_t* pDeviceMasks_ = {}, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal) VULKAN_HPP_NOEXCEPT
+ : swapchainCount( swapchainCount_ ), pDeviceMasks( pDeviceMasks_ ), mode( mode_ )
{}
- VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR & operator=( VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR ) - offsetof( ImportFenceWin32HandleInfoKHR, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR DeviceGroupPresentInfoKHR( DeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- ImportFenceWin32HandleInfoKHR( VkImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ DeviceGroupPresentInfoKHR( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- ImportFenceWin32HandleInfoKHR& operator=( VkImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ DeviceGroupPresentInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceMasks_, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal )
+ : swapchainCount( static_cast<uint32_t>( deviceMasks_.size() ) ), pDeviceMasks( deviceMasks_.data() ), mode( mode_ )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ DeviceGroupPresentInfoKHR & operator=( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR const *>( &rhs );
return *this;
}
- ImportFenceWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ DeviceGroupPresentInfoKHR & operator=( DeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DeviceGroupPresentInfoKHR ) );
return *this;
}
- ImportFenceWin32HandleInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
+ DeviceGroupPresentInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- fence = fence_;
+ pNext = pNext_;
return *this;
}
- ImportFenceWin32HandleInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ DeviceGroupPresentInfoKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
{
- flags = flags_;
+ swapchainCount = swapchainCount_;
return *this;
}
- ImportFenceWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ DeviceGroupPresentInfoKHR & setPDeviceMasks( const uint32_t* pDeviceMasks_ ) VULKAN_HPP_NOEXCEPT
{
- handleType = handleType_;
+ pDeviceMasks = pDeviceMasks_;
return *this;
}
- ImportFenceWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ DeviceGroupPresentInfoKHR & setDeviceMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceMasks_ ) VULKAN_HPP_NOEXCEPT
{
- handle = handle_;
+ swapchainCount = static_cast<uint32_t>( deviceMasks_.size() );
+ pDeviceMasks = deviceMasks_.data();
return *this;
}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
- ImportFenceWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
+ DeviceGroupPresentInfoKHR & setMode( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ ) VULKAN_HPP_NOEXCEPT
{
- name = name_;
+ mode = mode_;
return *this;
}
- operator VkImportFenceWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkDeviceGroupPresentInfoKHR const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkImportFenceWin32HandleInfoKHR*>( this );
+ return *reinterpret_cast<const VkDeviceGroupPresentInfoKHR*>( this );
}
- operator VkImportFenceWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkDeviceGroupPresentInfoKHR &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkImportFenceWin32HandleInfoKHR*>( this );
+ return *reinterpret_cast<VkDeviceGroupPresentInfoKHR*>( this );
}
- bool operator==( ImportFenceWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DeviceGroupPresentInfoKHR const& ) const = default;
+#else
+ bool operator==( DeviceGroupPresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( fence == rhs.fence )
- && ( flags == rhs.flags )
- && ( handleType == rhs.handleType )
- && ( handle == rhs.handle )
- && ( name == rhs.name );
+ && ( swapchainCount == rhs.swapchainCount )
+ && ( pDeviceMasks == rhs.pDeviceMasks )
+ && ( mode == rhs.mode );
}
- bool operator!=( ImportFenceWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( DeviceGroupPresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportFenceWin32HandleInfoKHR;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupPresentInfoKHR;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::Fence fence = {};
- VULKAN_HPP_NAMESPACE::FenceImportFlags flags = {};
- VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
- HANDLE handle = {};
- LPCWSTR name = {};
+ uint32_t swapchainCount = {};
+ const uint32_t* pDeviceMasks = {};
+ VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal;
+
};
- static_assert( sizeof( ImportFenceWin32HandleInfoKHR ) == sizeof( VkImportFenceWin32HandleInfoKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ImportFenceWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ static_assert( sizeof( DeviceGroupPresentInfoKHR ) == sizeof( VkDeviceGroupPresentInfoKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<DeviceGroupPresentInfoKHR>::value, "struct wrapper is not a standard layout!" );
- struct ImportMemoryFdInfoKHR
+ template <>
+ struct CppType<StructureType, StructureType::eDeviceGroupPresentInfoKHR>
{
- VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
- int fd_ = {} ) VULKAN_HPP_NOEXCEPT
- : handleType( handleType_ )
- , fd( fd_ )
+ using Type = DeviceGroupPresentInfoKHR;
+ };
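
Illustrative present call using the structure above: the device mask selects which device of the group presents the swapchain image. The queue, swapchain, image index and semaphore are assumed to come from the application; the mask value is just an example.

    #include <vulkan/vulkan.hpp>

    vk::Result presentOnDevice0( vk::Queue presentQueue, vk::SwapchainKHR swapchain,
                                 uint32_t imageIndex, vk::Semaphore renderFinished )
    {
      uint32_t deviceMask = 0x1;   // present from device 0 of the group

      vk::DeviceGroupPresentInfoKHR groupPresentInfo;
      groupPresentInfo.setSwapchainCount( 1 )
                      .setPDeviceMasks( &deviceMask )
                      .setMode( vk::DeviceGroupPresentModeFlagBitsKHR::eLocal );

      vk::PresentInfoKHR presentInfo;
      presentInfo.setWaitSemaphoreCount( 1 )
                 .setPWaitSemaphores( &renderFinished )
                 .setSwapchainCount( 1 )
                 .setPSwapchains( &swapchain )
                 .setPImageIndices( &imageIndex )
                 .setPNext( &groupPresentInfo );

      // presentKHR keeps returning a vk::Result because eSuboptimalKHR is a success code.
      return presentQueue.presentKHR( presentInfo );
    }
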
+
+ struct DeviceGroupRenderPassBeginInfo
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupRenderPassBeginInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DeviceGroupRenderPassBeginInfo(uint32_t deviceMask_ = {}, uint32_t deviceRenderAreaCount_ = {}, const VULKAN_HPP_NAMESPACE::Rect2D* pDeviceRenderAreas_ = {}) VULKAN_HPP_NOEXCEPT
+ : deviceMask( deviceMask_ ), deviceRenderAreaCount( deviceRenderAreaCount_ ), pDeviceRenderAreas( pDeviceRenderAreas_ )
{}
- VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR & operator=( VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR DeviceGroupRenderPassBeginInfo( DeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DeviceGroupRenderPassBeginInfo( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR ) - offsetof( ImportMemoryFdInfoKHR, pNext ) );
- return *this;
+ *this = rhs;
}
- ImportMemoryFdInfoKHR( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ DeviceGroupRenderPassBeginInfo( uint32_t deviceMask_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & deviceRenderAreas_ )
+ : deviceMask( deviceMask_ ), deviceRenderAreaCount( static_cast<uint32_t>( deviceRenderAreas_.size() ) ), pDeviceRenderAreas( deviceRenderAreas_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ DeviceGroupRenderPassBeginInfo & operator=( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo const *>( &rhs );
+ return *this;
}
- ImportMemoryFdInfoKHR& operator=( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ DeviceGroupRenderPassBeginInfo & operator=( DeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DeviceGroupRenderPassBeginInfo ) );
return *this;
}
- ImportMemoryFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ DeviceGroupRenderPassBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- ImportMemoryFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ DeviceGroupRenderPassBeginInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
{
- handleType = handleType_;
+ deviceMask = deviceMask_;
return *this;
}
- ImportMemoryFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT
+ DeviceGroupRenderPassBeginInfo & setDeviceRenderAreaCount( uint32_t deviceRenderAreaCount_ ) VULKAN_HPP_NOEXCEPT
{
- fd = fd_;
+ deviceRenderAreaCount = deviceRenderAreaCount_;
return *this;
}
- operator VkImportMemoryFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+ DeviceGroupRenderPassBeginInfo & setPDeviceRenderAreas( const VULKAN_HPP_NAMESPACE::Rect2D* pDeviceRenderAreas_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkImportMemoryFdInfoKHR*>( this );
+ pDeviceRenderAreas = pDeviceRenderAreas_;
+ return *this;
}
- operator VkImportMemoryFdInfoKHR &() VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ DeviceGroupRenderPassBeginInfo & setDeviceRenderAreas( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & deviceRenderAreas_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkImportMemoryFdInfoKHR*>( this );
+ deviceRenderAreaCount = static_cast<uint32_t>( deviceRenderAreas_.size() );
+ pDeviceRenderAreas = deviceRenderAreas_.data();
+ return *this;
}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
- bool operator==( ImportMemoryFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+ operator VkDeviceGroupRenderPassBeginInfo const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkDeviceGroupRenderPassBeginInfo*>( this );
+ }
+
+ operator VkDeviceGroupRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkDeviceGroupRenderPassBeginInfo*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DeviceGroupRenderPassBeginInfo const& ) const = default;
+#else
+ bool operator==( DeviceGroupRenderPassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( handleType == rhs.handleType )
- && ( fd == rhs.fd );
+ && ( deviceMask == rhs.deviceMask )
+ && ( deviceRenderAreaCount == rhs.deviceRenderAreaCount )
+ && ( pDeviceRenderAreas == rhs.pDeviceRenderAreas );
}
- bool operator!=( ImportMemoryFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( DeviceGroupRenderPassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryFdInfoKHR;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupRenderPassBeginInfo;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
- int fd = {};
+ uint32_t deviceMask = {};
+ uint32_t deviceRenderAreaCount = {};
+ const VULKAN_HPP_NAMESPACE::Rect2D* pDeviceRenderAreas = {};
+
};
- static_assert( sizeof( ImportMemoryFdInfoKHR ) == sizeof( VkImportMemoryFdInfoKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ImportMemoryFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( DeviceGroupRenderPassBeginInfo ) == sizeof( VkDeviceGroupRenderPassBeginInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<DeviceGroupRenderPassBeginInfo>::value, "struct wrapper is not a standard layout!" );
- struct ImportMemoryHostPointerInfoEXT
+ template <>
+ struct CppType<StructureType, StructureType::eDeviceGroupRenderPassBeginInfo>
{
- VULKAN_HPP_CONSTEXPR ImportMemoryHostPointerInfoEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
- void* pHostPointer_ = {} ) VULKAN_HPP_NOEXCEPT
- : handleType( handleType_ )
- , pHostPointer( pHostPointer_ )
+ using Type = DeviceGroupRenderPassBeginInfo;
+ };
+ using DeviceGroupRenderPassBeginInfoKHR = DeviceGroupRenderPassBeginInfo;
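
Sketch of chaining DeviceGroupRenderPassBeginInfo into a render pass begin. The command buffer, render pass, framebuffer and per-device render areas are assumed to exist, the mask 0x3 (devices 0 and 1) is an example value, and the render pass is assumed not to need clear values.

    #include <vulkan/vulkan.hpp>
    #include <vector>

    void beginRenderPassOnGroup( vk::CommandBuffer cmd, vk::RenderPass renderPass,
                                 vk::Framebuffer framebuffer, vk::Rect2D renderArea,
                                 std::vector<vk::Rect2D> const & perDeviceAreas )
    {
      vk::DeviceGroupRenderPassBeginInfo groupBeginInfo;
      groupBeginInfo.setDeviceMask( 0x3 )   // render on devices 0 and 1
                    .setDeviceRenderAreaCount( static_cast<uint32_t>( perDeviceAreas.size() ) )
                    .setPDeviceRenderAreas( perDeviceAreas.data() );

      vk::RenderPassBeginInfo beginInfo;
      beginInfo.setRenderPass( renderPass )
               .setFramebuffer( framebuffer )
               .setRenderArea( renderArea )
               .setPNext( &groupBeginInfo );   // no clear values: assumes no loadOp eClear attachments

      cmd.beginRenderPass( beginInfo, vk::SubpassContents::eInline );
    }
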
+
+ struct DeviceGroupSubmitInfo
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupSubmitInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DeviceGroupSubmitInfo(uint32_t waitSemaphoreCount_ = {}, const uint32_t* pWaitSemaphoreDeviceIndices_ = {}, uint32_t commandBufferCount_ = {}, const uint32_t* pCommandBufferDeviceMasks_ = {}, uint32_t signalSemaphoreCount_ = {}, const uint32_t* pSignalSemaphoreDeviceIndices_ = {}) VULKAN_HPP_NOEXCEPT
+ : waitSemaphoreCount( waitSemaphoreCount_ ), pWaitSemaphoreDeviceIndices( pWaitSemaphoreDeviceIndices_ ), commandBufferCount( commandBufferCount_ ), pCommandBufferDeviceMasks( pCommandBufferDeviceMasks_ ), signalSemaphoreCount( signalSemaphoreCount_ ), pSignalSemaphoreDeviceIndices( pSignalSemaphoreDeviceIndices_ )
{}
- VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT & operator=( VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR DeviceGroupSubmitInfo( DeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DeviceGroupSubmitInfo( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT ) - offsetof( ImportMemoryHostPointerInfoEXT, pNext ) );
- return *this;
+ *this = rhs;
}
- ImportMemoryHostPointerInfoEXT( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ DeviceGroupSubmitInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & waitSemaphoreDeviceIndices_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & commandBufferDeviceMasks_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & signalSemaphoreDeviceIndices_ = {} )
+ : waitSemaphoreCount( static_cast<uint32_t>( waitSemaphoreDeviceIndices_.size() ) ), pWaitSemaphoreDeviceIndices( waitSemaphoreDeviceIndices_.data() ), commandBufferCount( static_cast<uint32_t>( commandBufferDeviceMasks_.size() ) ), pCommandBufferDeviceMasks( commandBufferDeviceMasks_.data() ), signalSemaphoreCount( static_cast<uint32_t>( signalSemaphoreDeviceIndices_.size() ) ), pSignalSemaphoreDeviceIndices( signalSemaphoreDeviceIndices_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ DeviceGroupSubmitInfo & operator=( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo const *>( &rhs );
+ return *this;
}
- ImportMemoryHostPointerInfoEXT& operator=( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ DeviceGroupSubmitInfo & operator=( DeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DeviceGroupSubmitInfo ) );
return *this;
}
- ImportMemoryHostPointerInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ DeviceGroupSubmitInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- ImportMemoryHostPointerInfoEXT & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ DeviceGroupSubmitInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
{
- handleType = handleType_;
+ waitSemaphoreCount = waitSemaphoreCount_;
return *this;
}
- ImportMemoryHostPointerInfoEXT & setPHostPointer( void* pHostPointer_ ) VULKAN_HPP_NOEXCEPT
+ DeviceGroupSubmitInfo & setPWaitSemaphoreDeviceIndices( const uint32_t* pWaitSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
{
- pHostPointer = pHostPointer_;
+ pWaitSemaphoreDeviceIndices = pWaitSemaphoreDeviceIndices_;
return *this;
}
- operator VkImportMemoryHostPointerInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ DeviceGroupSubmitInfo & setWaitSemaphoreDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & waitSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkImportMemoryHostPointerInfoEXT*>( this );
+ waitSemaphoreCount = static_cast<uint32_t>( waitSemaphoreDeviceIndices_.size() );
+ pWaitSemaphoreDeviceIndices = waitSemaphoreDeviceIndices_.data();
+ return *this;
}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
- operator VkImportMemoryHostPointerInfoEXT &() VULKAN_HPP_NOEXCEPT
+ DeviceGroupSubmitInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkImportMemoryHostPointerInfoEXT*>( this );
+ commandBufferCount = commandBufferCount_;
+ return *this;
}
- bool operator==( ImportMemoryHostPointerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ DeviceGroupSubmitInfo & setPCommandBufferDeviceMasks( const uint32_t* pCommandBufferDeviceMasks_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pCommandBufferDeviceMasks = pCommandBufferDeviceMasks_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ DeviceGroupSubmitInfo & setCommandBufferDeviceMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & commandBufferDeviceMasks_ ) VULKAN_HPP_NOEXCEPT
+ {
+ commandBufferCount = static_cast<uint32_t>( commandBufferDeviceMasks_.size() );
+ pCommandBufferDeviceMasks = commandBufferDeviceMasks_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+ DeviceGroupSubmitInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ signalSemaphoreCount = signalSemaphoreCount_;
+ return *this;
+ }
+
+ DeviceGroupSubmitInfo & setPSignalSemaphoreDeviceIndices( const uint32_t* pSignalSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pSignalSemaphoreDeviceIndices = pSignalSemaphoreDeviceIndices_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ DeviceGroupSubmitInfo & setSignalSemaphoreDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & signalSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
+ {
+ signalSemaphoreCount = static_cast<uint32_t>( signalSemaphoreDeviceIndices_.size() );
+ pSignalSemaphoreDeviceIndices = signalSemaphoreDeviceIndices_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+ operator VkDeviceGroupSubmitInfo const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkDeviceGroupSubmitInfo*>( this );
+ }
+
+ operator VkDeviceGroupSubmitInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkDeviceGroupSubmitInfo*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DeviceGroupSubmitInfo const& ) const = default;
+#else
+ bool operator==( DeviceGroupSubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( handleType == rhs.handleType )
- && ( pHostPointer == rhs.pHostPointer );
+ && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
+ && ( pWaitSemaphoreDeviceIndices == rhs.pWaitSemaphoreDeviceIndices )
+ && ( commandBufferCount == rhs.commandBufferCount )
+ && ( pCommandBufferDeviceMasks == rhs.pCommandBufferDeviceMasks )
+ && ( signalSemaphoreCount == rhs.signalSemaphoreCount )
+ && ( pSignalSemaphoreDeviceIndices == rhs.pSignalSemaphoreDeviceIndices );
}
- bool operator!=( ImportMemoryHostPointerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( DeviceGroupSubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryHostPointerInfoEXT;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupSubmitInfo;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
- void* pHostPointer = {};
+ uint32_t waitSemaphoreCount = {};
+ const uint32_t* pWaitSemaphoreDeviceIndices = {};
+ uint32_t commandBufferCount = {};
+ const uint32_t* pCommandBufferDeviceMasks = {};
+ uint32_t signalSemaphoreCount = {};
+ const uint32_t* pSignalSemaphoreDeviceIndices = {};
+
};
- static_assert( sizeof( ImportMemoryHostPointerInfoEXT ) == sizeof( VkImportMemoryHostPointerInfoEXT ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ImportMemoryHostPointerInfoEXT>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( DeviceGroupSubmitInfo ) == sizeof( VkDeviceGroupSubmitInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<DeviceGroupSubmitInfo>::value, "struct wrapper is not a standard layout!" );
-#ifdef VK_USE_PLATFORM_WIN32_KHR
+ template <>
+ struct CppType<StructureType, StructureType::eDeviceGroupSubmitInfo>
+ {
+ using Type = DeviceGroupSubmitInfo;
+ };
+ using DeviceGroupSubmitInfoKHR = DeviceGroupSubmitInfo;
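// --- Illustrative sketch (editor-added; not part of vulkan.hpp or of this diff) ---
// Shows how the ArrayProxyNoTemporaries setters introduced above fill the element
// count and pointer pair from one named array; only available when enhanced mode is
// enabled (VULKAN_HPP_DISABLE_ENHANCED_MODE not defined). All values are made up.
// #include <array>
// #include <vulkan/vulkan.hpp>
//
// vk::DeviceGroupSubmitInfo makeDeviceGroupSubmitInfo( std::array<uint32_t, 1> const & waitIndices,
//                                                      std::array<uint32_t, 1> const & deviceMasks,
//                                                      std::array<uint32_t, 1> const & signalIndices )
// {
//   // each set*DeviceIndices / set*DeviceMasks call stores both count and pointer
//   return vk::DeviceGroupSubmitInfo{}
//       .setWaitSemaphoreDeviceIndices( waitIndices )
//       .setCommandBufferDeviceMasks( deviceMasks )
//       .setSignalSemaphoreDeviceIndices( signalIndices );
// }
// --- end sketch ---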
- struct ImportMemoryWin32HandleInfoKHR
+ struct DeviceGroupSwapchainCreateInfoKHR
{
- VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
- HANDLE handle_ = {},
- LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT
- : handleType( handleType_ )
- , handle( handle_ )
- , name( name_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupSwapchainCreateInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DeviceGroupSwapchainCreateInfoKHR(VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {}) VULKAN_HPP_NOEXCEPT
+ : modes( modes_ )
{}
- VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR & operator=( VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR ) - offsetof( ImportMemoryWin32HandleInfoKHR, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR DeviceGroupSwapchainCreateInfoKHR( DeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- ImportMemoryWin32HandleInfoKHR( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ DeviceGroupSwapchainCreateInfoKHR( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ImportMemoryWin32HandleInfoKHR& operator=( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ DeviceGroupSwapchainCreateInfoKHR & operator=( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR const *>( &rhs );
return *this;
}
- ImportMemoryWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ DeviceGroupSwapchainCreateInfoKHR & operator=( DeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DeviceGroupSwapchainCreateInfoKHR ) );
return *this;
}
- ImportMemoryWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ DeviceGroupSwapchainCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- handleType = handleType_;
+ pNext = pNext_;
return *this;
}
- ImportMemoryWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT
+ DeviceGroupSwapchainCreateInfoKHR & setModes( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ ) VULKAN_HPP_NOEXCEPT
{
- handle = handle_;
+ modes = modes_;
return *this;
}
- ImportMemoryWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
- {
- name = name_;
- return *this;
- }
- operator VkImportMemoryWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+ operator VkDeviceGroupSwapchainCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkImportMemoryWin32HandleInfoKHR*>( this );
+ return *reinterpret_cast<const VkDeviceGroupSwapchainCreateInfoKHR*>( this );
}
- operator VkImportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkDeviceGroupSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkImportMemoryWin32HandleInfoKHR*>( this );
+ return *reinterpret_cast<VkDeviceGroupSwapchainCreateInfoKHR*>( this );
}
- bool operator==( ImportMemoryWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DeviceGroupSwapchainCreateInfoKHR const& ) const = default;
+#else
+ bool operator==( DeviceGroupSwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( handleType == rhs.handleType )
- && ( handle == rhs.handle )
- && ( name == rhs.name );
+ && ( modes == rhs.modes );
}
- bool operator!=( ImportMemoryWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( DeviceGroupSwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryWin32HandleInfoKHR;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupSwapchainCreateInfoKHR;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
- HANDLE handle = {};
- LPCWSTR name = {};
+ VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes = {};
+
};
- static_assert( sizeof( ImportMemoryWin32HandleInfoKHR ) == sizeof( VkImportMemoryWin32HandleInfoKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ImportMemoryWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ static_assert( sizeof( DeviceGroupSwapchainCreateInfoKHR ) == sizeof( VkDeviceGroupSwapchainCreateInfoKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<DeviceGroupSwapchainCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
-#ifdef VK_USE_PLATFORM_WIN32_KHR
+ template <>
+ struct CppType<StructureType, StructureType::eDeviceGroupSwapchainCreateInfoKHR>
+ {
+ using Type = DeviceGroupSwapchainCreateInfoKHR;
+ };
- struct ImportMemoryWin32HandleInfoNV
+ struct DeviceMemoryOverallocationCreateInfoAMD
{
- VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ = {},
- HANDLE handle_ = {} ) VULKAN_HPP_NOEXCEPT
- : handleType( handleType_ )
- , handle( handle_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceMemoryOverallocationCreateInfoAMD;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DeviceMemoryOverallocationCreateInfoAMD(VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ = VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault) VULKAN_HPP_NOEXCEPT
+ : overallocationBehavior( overallocationBehavior_ )
{}
- VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV & operator=( VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV ) - offsetof( ImportMemoryWin32HandleInfoNV, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR DeviceMemoryOverallocationCreateInfoAMD( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- ImportMemoryWin32HandleInfoNV( VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ DeviceMemoryOverallocationCreateInfoAMD( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ImportMemoryWin32HandleInfoNV& operator=( VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ DeviceMemoryOverallocationCreateInfoAMD & operator=( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD const *>( &rhs );
return *this;
}
- ImportMemoryWin32HandleInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ DeviceMemoryOverallocationCreateInfoAMD & operator=( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DeviceMemoryOverallocationCreateInfoAMD ) );
return *this;
}
- ImportMemoryWin32HandleInfoNV & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ ) VULKAN_HPP_NOEXCEPT
+ DeviceMemoryOverallocationCreateInfoAMD & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- handleType = handleType_;
+ pNext = pNext_;
return *this;
}
- ImportMemoryWin32HandleInfoNV & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT
+ DeviceMemoryOverallocationCreateInfoAMD & setOverallocationBehavior( VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ ) VULKAN_HPP_NOEXCEPT
{
- handle = handle_;
+ overallocationBehavior = overallocationBehavior_;
return *this;
}
- operator VkImportMemoryWin32HandleInfoNV const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkDeviceMemoryOverallocationCreateInfoAMD const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkImportMemoryWin32HandleInfoNV*>( this );
+ return *reinterpret_cast<const VkDeviceMemoryOverallocationCreateInfoAMD*>( this );
}
- operator VkImportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT
+ operator VkDeviceMemoryOverallocationCreateInfoAMD &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkImportMemoryWin32HandleInfoNV*>( this );
+ return *reinterpret_cast<VkDeviceMemoryOverallocationCreateInfoAMD*>( this );
}
- bool operator==( ImportMemoryWin32HandleInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DeviceMemoryOverallocationCreateInfoAMD const& ) const = default;
+#else
+ bool operator==( DeviceMemoryOverallocationCreateInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( handleType == rhs.handleType )
- && ( handle == rhs.handle );
+ && ( overallocationBehavior == rhs.overallocationBehavior );
}
- bool operator!=( ImportMemoryWin32HandleInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( DeviceMemoryOverallocationCreateInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryWin32HandleInfoNV;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryOverallocationCreateInfoAMD;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType = {};
- HANDLE handle = {};
+ VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior = VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault;
+
};
- static_assert( sizeof( ImportMemoryWin32HandleInfoNV ) == sizeof( VkImportMemoryWin32HandleInfoNV ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ImportMemoryWin32HandleInfoNV>::value, "struct wrapper is not a standard layout!" );
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ static_assert( sizeof( DeviceMemoryOverallocationCreateInfoAMD ) == sizeof( VkDeviceMemoryOverallocationCreateInfoAMD ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<DeviceMemoryOverallocationCreateInfoAMD>::value, "struct wrapper is not a standard layout!" );
- struct ImportSemaphoreFdInfoKHR
+ template <>
+ struct CppType<StructureType, StructureType::eDeviceMemoryOverallocationCreateInfoAMD>
{
- VULKAN_HPP_CONSTEXPR ImportSemaphoreFdInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
- VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {},
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd,
- int fd_ = {} ) VULKAN_HPP_NOEXCEPT
- : semaphore( semaphore_ )
- , flags( flags_ )
- , handleType( handleType_ )
- , fd( fd_ )
+ using Type = DeviceMemoryOverallocationCreateInfoAMD;
+ };
+
+ struct DevicePrivateDataCreateInfoEXT
+ {
+ static const bool allowDuplicate = true;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDevicePrivateDataCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DevicePrivateDataCreateInfoEXT(uint32_t privateDataSlotRequestCount_ = {}) VULKAN_HPP_NOEXCEPT
+ : privateDataSlotRequestCount( privateDataSlotRequestCount_ )
{}
- VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR & operator=( VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR DevicePrivateDataCreateInfoEXT( DevicePrivateDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DevicePrivateDataCreateInfoEXT( VkDevicePrivateDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR ) - offsetof( ImportSemaphoreFdInfoKHR, pNext ) );
- return *this;
+ *this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ImportSemaphoreFdInfoKHR( VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ DevicePrivateDataCreateInfoEXT & operator=( VkDevicePrivateDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DevicePrivateDataCreateInfoEXT const *>( &rhs );
+ return *this;
}
- ImportSemaphoreFdInfoKHR& operator=( VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ DevicePrivateDataCreateInfoEXT & operator=( DevicePrivateDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DevicePrivateDataCreateInfoEXT ) );
return *this;
}
- ImportSemaphoreFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ DevicePrivateDataCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- ImportSemaphoreFdInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
+ DevicePrivateDataCreateInfoEXT & setPrivateDataSlotRequestCount( uint32_t privateDataSlotRequestCount_ ) VULKAN_HPP_NOEXCEPT
{
- semaphore = semaphore_;
+ privateDataSlotRequestCount = privateDataSlotRequestCount_;
return *this;
}
- ImportSemaphoreFdInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
+
+ operator VkDevicePrivateDataCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
{
- flags = flags_;
+ return *reinterpret_cast<const VkDevicePrivateDataCreateInfoEXT*>( this );
+ }
+
+ operator VkDevicePrivateDataCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkDevicePrivateDataCreateInfoEXT*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DevicePrivateDataCreateInfoEXT const& ) const = default;
+#else
+ bool operator==( DevicePrivateDataCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( privateDataSlotRequestCount == rhs.privateDataSlotRequestCount );
+ }
+
+ bool operator!=( DevicePrivateDataCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDevicePrivateDataCreateInfoEXT;
+ const void* pNext = {};
+ uint32_t privateDataSlotRequestCount = {};
+
+ };
+ static_assert( sizeof( DevicePrivateDataCreateInfoEXT ) == sizeof( VkDevicePrivateDataCreateInfoEXT ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<DevicePrivateDataCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eDevicePrivateDataCreateInfoEXT>
+ {
+ using Type = DevicePrivateDataCreateInfoEXT;
+ };
+
+ struct DeviceQueueGlobalPriorityCreateInfoEXT
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueGlobalPriorityCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DeviceQueueGlobalPriorityCreateInfoEXT(VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT globalPriority_ = VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow) VULKAN_HPP_NOEXCEPT
+ : globalPriority( globalPriority_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR DeviceQueueGlobalPriorityCreateInfoEXT( DeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DeviceQueueGlobalPriorityCreateInfoEXT( VkDeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ DeviceQueueGlobalPriorityCreateInfoEXT & operator=( VkDeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoEXT const *>( &rhs );
return *this;
}
- ImportSemaphoreFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ DeviceQueueGlobalPriorityCreateInfoEXT & operator=( DeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- handleType = handleType_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DeviceQueueGlobalPriorityCreateInfoEXT ) );
return *this;
}
- ImportSemaphoreFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT
+ DeviceQueueGlobalPriorityCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- fd = fd_;
+ pNext = pNext_;
return *this;
}
- operator VkImportSemaphoreFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+ DeviceQueueGlobalPriorityCreateInfoEXT & setGlobalPriority( VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT globalPriority_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkImportSemaphoreFdInfoKHR*>( this );
+ globalPriority = globalPriority_;
+ return *this;
}
- operator VkImportSemaphoreFdInfoKHR &() VULKAN_HPP_NOEXCEPT
+
+ operator VkDeviceQueueGlobalPriorityCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkImportSemaphoreFdInfoKHR*>( this );
+ return *reinterpret_cast<const VkDeviceQueueGlobalPriorityCreateInfoEXT*>( this );
}
- bool operator==( ImportSemaphoreFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ operator VkDeviceQueueGlobalPriorityCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkDeviceQueueGlobalPriorityCreateInfoEXT*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DeviceQueueGlobalPriorityCreateInfoEXT const& ) const = default;
+#else
+ bool operator==( DeviceQueueGlobalPriorityCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( semaphore == rhs.semaphore )
- && ( flags == rhs.flags )
- && ( handleType == rhs.handleType )
- && ( fd == rhs.fd );
+ && ( globalPriority == rhs.globalPriority );
}
- bool operator!=( ImportSemaphoreFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( DeviceQueueGlobalPriorityCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreFdInfoKHR;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueGlobalPriorityCreateInfoEXT;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
- VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {};
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
- int fd = {};
+ VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT globalPriority = VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow;
+
};
- static_assert( sizeof( ImportSemaphoreFdInfoKHR ) == sizeof( VkImportSemaphoreFdInfoKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ImportSemaphoreFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( DeviceQueueGlobalPriorityCreateInfoEXT ) == sizeof( VkDeviceQueueGlobalPriorityCreateInfoEXT ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<DeviceQueueGlobalPriorityCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
-#ifdef VK_USE_PLATFORM_WIN32_KHR
+ template <>
+ struct CppType<StructureType, StructureType::eDeviceQueueGlobalPriorityCreateInfoEXT>
+ {
+ using Type = DeviceQueueGlobalPriorityCreateInfoEXT;
+ };
- struct ImportSemaphoreWin32HandleInfoKHR
+#ifdef VK_USE_PLATFORM_DIRECTFB_EXT
+ struct DirectFBSurfaceCreateInfoEXT
{
- VULKAN_HPP_CONSTEXPR ImportSemaphoreWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
- VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {},
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd,
- HANDLE handle_ = {},
- LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT
- : semaphore( semaphore_ )
- , flags( flags_ )
- , handleType( handleType_ )
- , handle( handle_ )
- , name( name_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDirectfbSurfaceCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DirectFBSurfaceCreateInfoEXT(VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT flags_ = {}, IDirectFB* dfb_ = {}, IDirectFBSurface* surface_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), dfb( dfb_ ), surface( surface_ )
{}
- VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & operator=( VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR ) - offsetof( ImportSemaphoreWin32HandleInfoKHR, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR DirectFBSurfaceCreateInfoEXT( DirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- ImportSemaphoreWin32HandleInfoKHR( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ DirectFBSurfaceCreateInfoEXT( VkDirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ImportSemaphoreWin32HandleInfoKHR& operator=( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ DirectFBSurfaceCreateInfoEXT & operator=( VkDirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT const *>( &rhs );
return *this;
}
- ImportSemaphoreWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ DirectFBSurfaceCreateInfoEXT & operator=( DirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DirectFBSurfaceCreateInfoEXT ) );
return *this;
}
- ImportSemaphoreWin32HandleInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
+ DirectFBSurfaceCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- semaphore = semaphore_;
+ pNext = pNext_;
return *this;
}
- ImportSemaphoreWin32HandleInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ DirectFBSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- ImportSemaphoreWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ DirectFBSurfaceCreateInfoEXT & setDfb( IDirectFB* dfb_ ) VULKAN_HPP_NOEXCEPT
{
- handleType = handleType_;
+ dfb = dfb_;
return *this;
}
- ImportSemaphoreWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT
+ DirectFBSurfaceCreateInfoEXT & setSurface( IDirectFBSurface* surface_ ) VULKAN_HPP_NOEXCEPT
{
- handle = handle_;
+ surface = surface_;
return *this;
}
- ImportSemaphoreWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
- {
- name = name_;
- return *this;
- }
- operator VkImportSemaphoreWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+ operator VkDirectFBSurfaceCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR*>( this );
+ return *reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT*>( this );
}
- operator VkImportSemaphoreWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkDirectFBSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkImportSemaphoreWin32HandleInfoKHR*>( this );
+ return *reinterpret_cast<VkDirectFBSurfaceCreateInfoEXT*>( this );
}
- bool operator==( ImportSemaphoreWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DirectFBSurfaceCreateInfoEXT const& ) const = default;
+#else
+ bool operator==( DirectFBSurfaceCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( semaphore == rhs.semaphore )
&& ( flags == rhs.flags )
- && ( handleType == rhs.handleType )
- && ( handle == rhs.handle )
- && ( name == rhs.name );
+ && ( dfb == rhs.dfb )
+ && ( surface == rhs.surface );
}
- bool operator!=( ImportSemaphoreWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( DirectFBSurfaceCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreWin32HandleInfoKHR;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDirectfbSurfaceCreateInfoEXT;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
- VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {};
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
- HANDLE handle = {};
- LPCWSTR name = {};
+ VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT flags = {};
+ IDirectFB* dfb = {};
+ IDirectFBSurface* surface = {};
+
};
- static_assert( sizeof( ImportSemaphoreWin32HandleInfoKHR ) == sizeof( VkImportSemaphoreWin32HandleInfoKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ImportSemaphoreWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ static_assert( sizeof( DirectFBSurfaceCreateInfoEXT ) == sizeof( VkDirectFBSurfaceCreateInfoEXT ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<DirectFBSurfaceCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
- struct IndirectCommandsLayoutTokenNVX
+ template <>
+ struct CppType<StructureType, StructureType::eDirectfbSurfaceCreateInfoEXT>
+ {
+ using Type = DirectFBSurfaceCreateInfoEXT;
+ };
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
+ struct DispatchIndirectCommand
{
- VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutTokenNVX( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX tokenType_ = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX::ePipeline,
- uint32_t bindingUnit_ = {},
- uint32_t dynamicCount_ = {},
- uint32_t divisor_ = {} ) VULKAN_HPP_NOEXCEPT
- : tokenType( tokenType_ )
- , bindingUnit( bindingUnit_ )
- , dynamicCount( dynamicCount_ )
- , divisor( divisor_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DispatchIndirectCommand(uint32_t x_ = {}, uint32_t y_ = {}, uint32_t z_ = {}) VULKAN_HPP_NOEXCEPT
+ : x( x_ ), y( y_ ), z( z_ )
{}
- IndirectCommandsLayoutTokenNVX( VkIndirectCommandsLayoutTokenNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR DispatchIndirectCommand( DispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DispatchIndirectCommand( VkDispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- IndirectCommandsLayoutTokenNVX& operator=( VkIndirectCommandsLayoutTokenNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+ DispatchIndirectCommand & operator=( VkDispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNVX const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DispatchIndirectCommand const *>( &rhs );
return *this;
}
- IndirectCommandsLayoutTokenNVX & setTokenType( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX tokenType_ ) VULKAN_HPP_NOEXCEPT
+ DispatchIndirectCommand & operator=( DispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
{
- tokenType = tokenType_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DispatchIndirectCommand ) );
return *this;
}
- IndirectCommandsLayoutTokenNVX & setBindingUnit( uint32_t bindingUnit_ ) VULKAN_HPP_NOEXCEPT
+ DispatchIndirectCommand & setX( uint32_t x_ ) VULKAN_HPP_NOEXCEPT
{
- bindingUnit = bindingUnit_;
+ x = x_;
return *this;
}
- IndirectCommandsLayoutTokenNVX & setDynamicCount( uint32_t dynamicCount_ ) VULKAN_HPP_NOEXCEPT
+ DispatchIndirectCommand & setY( uint32_t y_ ) VULKAN_HPP_NOEXCEPT
{
- dynamicCount = dynamicCount_;
+ y = y_;
return *this;
}
- IndirectCommandsLayoutTokenNVX & setDivisor( uint32_t divisor_ ) VULKAN_HPP_NOEXCEPT
+ DispatchIndirectCommand & setZ( uint32_t z_ ) VULKAN_HPP_NOEXCEPT
{
- divisor = divisor_;
+ z = z_;
return *this;
}
- operator VkIndirectCommandsLayoutTokenNVX const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkDispatchIndirectCommand const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkIndirectCommandsLayoutTokenNVX*>( this );
+ return *reinterpret_cast<const VkDispatchIndirectCommand*>( this );
}
- operator VkIndirectCommandsLayoutTokenNVX &() VULKAN_HPP_NOEXCEPT
+ operator VkDispatchIndirectCommand &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkIndirectCommandsLayoutTokenNVX*>( this );
+ return *reinterpret_cast<VkDispatchIndirectCommand*>( this );
}
- bool operator==( IndirectCommandsLayoutTokenNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DispatchIndirectCommand const& ) const = default;
+#else
+ bool operator==( DispatchIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- return ( tokenType == rhs.tokenType )
- && ( bindingUnit == rhs.bindingUnit )
- && ( dynamicCount == rhs.dynamicCount )
- && ( divisor == rhs.divisor );
+ return ( x == rhs.x )
+ && ( y == rhs.y )
+ && ( z == rhs.z );
}
- bool operator!=( IndirectCommandsLayoutTokenNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( DispatchIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX tokenType = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX::ePipeline;
- uint32_t bindingUnit = {};
- uint32_t dynamicCount = {};
- uint32_t divisor = {};
+ uint32_t x = {};
+ uint32_t y = {};
+ uint32_t z = {};
+
};
- static_assert( sizeof( IndirectCommandsLayoutTokenNVX ) == sizeof( VkIndirectCommandsLayoutTokenNVX ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<IndirectCommandsLayoutTokenNVX>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( DispatchIndirectCommand ) == sizeof( VkDispatchIndirectCommand ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<DispatchIndirectCommand>::value, "struct wrapper is not a standard layout!" );
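// --- Illustrative sketch (editor-added; not part of this diff): writing an indirect
// dispatch record. The static_asserts above guarantee DispatchIndirectCommand has the
// same size and standard layout as VkDispatchIndirectCommand, so it can be copied
// directly into a buffer consumed by vkCmdDispatchIndirect. Buffer mapping is assumed,
// and the wrapper constructor requires VULKAN_HPP_NO_STRUCT_CONSTRUCTORS to be undefined.
// #include <cstring>
// #include <vulkan/vulkan.hpp>
//
// void writeDispatchRecord( void * mappedIndirectBuffer, uint32_t groupsX, uint32_t groupsY, uint32_t groupsZ )
// {
//   vk::DispatchIndirectCommand cmd( groupsX, groupsY, groupsZ );  // x, y, z workgroup counts
//   std::memcpy( mappedIndirectBuffer, &cmd, sizeof( cmd ) );      // layout-compatible with the C struct
// }
// --- end sketch ---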
- struct IndirectCommandsLayoutCreateInfoNVX
+ struct DisplayNativeHdrSurfaceCapabilitiesAMD
{
- VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNVX( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics,
- VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNVX flags_ = {},
- uint32_t tokenCount_ = {},
- const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNVX* pTokens_ = {} ) VULKAN_HPP_NOEXCEPT
- : pipelineBindPoint( pipelineBindPoint_ )
- , flags( flags_ )
- , tokenCount( tokenCount_ )
- , pTokens( pTokens_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DisplayNativeHdrSurfaceCapabilitiesAMD(VULKAN_HPP_NAMESPACE::Bool32 localDimmingSupport_ = {}) VULKAN_HPP_NOEXCEPT
+ : localDimmingSupport( localDimmingSupport_ )
{}
- VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNVX & operator=( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR DisplayNativeHdrSurfaceCapabilitiesAMD( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DisplayNativeHdrSurfaceCapabilitiesAMD( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ DisplayNativeHdrSurfaceCapabilitiesAMD & operator=( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNVX ) - offsetof( IndirectCommandsLayoutCreateInfoNVX, pNext ) );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD const *>( &rhs );
return *this;
}
- IndirectCommandsLayoutCreateInfoNVX( VkIndirectCommandsLayoutCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+ DisplayNativeHdrSurfaceCapabilitiesAMD & operator=( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DisplayNativeHdrSurfaceCapabilitiesAMD ) );
+ return *this;
+ }
+
+
+ operator VkDisplayNativeHdrSurfaceCapabilitiesAMD const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkDisplayNativeHdrSurfaceCapabilitiesAMD*>( this );
+ }
+
+ operator VkDisplayNativeHdrSurfaceCapabilitiesAMD &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkDisplayNativeHdrSurfaceCapabilitiesAMD*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DisplayNativeHdrSurfaceCapabilitiesAMD const& ) const = default;
+#else
+ bool operator==( DisplayNativeHdrSurfaceCapabilitiesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( localDimmingSupport == rhs.localDimmingSupport );
+ }
+
+ bool operator!=( DisplayNativeHdrSurfaceCapabilitiesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 localDimmingSupport = {};
+
+ };
+ static_assert( sizeof( DisplayNativeHdrSurfaceCapabilitiesAMD ) == sizeof( VkDisplayNativeHdrSurfaceCapabilitiesAMD ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<DisplayNativeHdrSurfaceCapabilitiesAMD>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD>
+ {
+ using Type = DisplayNativeHdrSurfaceCapabilitiesAMD;
+ };
+
+ struct DisplayPresentInfoKHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPresentInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DisplayPresentInfoKHR(VULKAN_HPP_NAMESPACE::Rect2D srcRect_ = {}, VULKAN_HPP_NAMESPACE::Rect2D dstRect_ = {}, VULKAN_HPP_NAMESPACE::Bool32 persistent_ = {}) VULKAN_HPP_NOEXCEPT
+ : srcRect( srcRect_ ), dstRect( dstRect_ ), persistent( persistent_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR DisplayPresentInfoKHR( DisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DisplayPresentInfoKHR( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- IndirectCommandsLayoutCreateInfoNVX& operator=( VkIndirectCommandsLayoutCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+ DisplayPresentInfoKHR & operator=( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNVX const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR const *>( &rhs );
return *this;
}
- IndirectCommandsLayoutCreateInfoNVX & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ DisplayPresentInfoKHR & operator=( DisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DisplayPresentInfoKHR ) );
return *this;
}
- IndirectCommandsLayoutCreateInfoNVX & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
+ DisplayPresentInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- pipelineBindPoint = pipelineBindPoint_;
+ pNext = pNext_;
return *this;
}
- IndirectCommandsLayoutCreateInfoNVX & setFlags( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT
+ DisplayPresentInfoKHR & setSrcRect( VULKAN_HPP_NAMESPACE::Rect2D const & srcRect_ ) VULKAN_HPP_NOEXCEPT
{
- flags = flags_;
+ srcRect = srcRect_;
return *this;
}
- IndirectCommandsLayoutCreateInfoNVX & setTokenCount( uint32_t tokenCount_ ) VULKAN_HPP_NOEXCEPT
+ DisplayPresentInfoKHR & setDstRect( VULKAN_HPP_NAMESPACE::Rect2D const & dstRect_ ) VULKAN_HPP_NOEXCEPT
{
- tokenCount = tokenCount_;
+ dstRect = dstRect_;
return *this;
}
- IndirectCommandsLayoutCreateInfoNVX & setPTokens( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNVX* pTokens_ ) VULKAN_HPP_NOEXCEPT
+ DisplayPresentInfoKHR & setPersistent( VULKAN_HPP_NAMESPACE::Bool32 persistent_ ) VULKAN_HPP_NOEXCEPT
{
- pTokens = pTokens_;
+ persistent = persistent_;
return *this;
}
- operator VkIndirectCommandsLayoutCreateInfoNVX const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkDisplayPresentInfoKHR const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNVX*>( this );
+ return *reinterpret_cast<const VkDisplayPresentInfoKHR*>( this );
}
- operator VkIndirectCommandsLayoutCreateInfoNVX &() VULKAN_HPP_NOEXCEPT
+ operator VkDisplayPresentInfoKHR &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkIndirectCommandsLayoutCreateInfoNVX*>( this );
+ return *reinterpret_cast<VkDisplayPresentInfoKHR*>( this );
}
- bool operator==( IndirectCommandsLayoutCreateInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DisplayPresentInfoKHR const& ) const = default;
+#else
+ bool operator==( DisplayPresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( pipelineBindPoint == rhs.pipelineBindPoint )
- && ( flags == rhs.flags )
- && ( tokenCount == rhs.tokenCount )
- && ( pTokens == rhs.pTokens );
+ && ( srcRect == rhs.srcRect )
+ && ( dstRect == rhs.dstRect )
+ && ( persistent == rhs.persistent );
}
- bool operator!=( IndirectCommandsLayoutCreateInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( DisplayPresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutCreateInfoNVX;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPresentInfoKHR;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
- VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNVX flags = {};
- uint32_t tokenCount = {};
- const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNVX* pTokens = {};
+ VULKAN_HPP_NAMESPACE::Rect2D srcRect = {};
+ VULKAN_HPP_NAMESPACE::Rect2D dstRect = {};
+ VULKAN_HPP_NAMESPACE::Bool32 persistent = {};
+
};
- static_assert( sizeof( IndirectCommandsLayoutCreateInfoNVX ) == sizeof( VkIndirectCommandsLayoutCreateInfoNVX ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<IndirectCommandsLayoutCreateInfoNVX>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( DisplayPresentInfoKHR ) == sizeof( VkDisplayPresentInfoKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<DisplayPresentInfoKHR>::value, "struct wrapper is not a standard layout!" );
- struct InitializePerformanceApiInfoINTEL
+ template <>
+ struct CppType<StructureType, StructureType::eDisplayPresentInfoKHR>
+ {
+ using Type = DisplayPresentInfoKHR;
+ };
+
+ struct DisplaySurfaceCreateInfoKHR
{
- VULKAN_HPP_CONSTEXPR InitializePerformanceApiInfoINTEL( void* pUserData_ = {} ) VULKAN_HPP_NOEXCEPT
- : pUserData( pUserData_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplaySurfaceCreateInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DisplaySurfaceCreateInfoKHR(VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ = {}, uint32_t planeIndex_ = {}, uint32_t planeStackIndex_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, float globalAlpha_ = {}, VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ = VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque, VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), displayMode( displayMode_ ), planeIndex( planeIndex_ ), planeStackIndex( planeStackIndex_ ), transform( transform_ ), globalAlpha( globalAlpha_ ), alphaMode( alphaMode_ ), imageExtent( imageExtent_ )
{}
- VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL & operator=( VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR DisplaySurfaceCreateInfoKHR( DisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DisplaySurfaceCreateInfoKHR( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL ) - offsetof( InitializePerformanceApiInfoINTEL, pNext ) );
- return *this;
+ *this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- InitializePerformanceApiInfoINTEL( VkInitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+ DisplaySurfaceCreateInfoKHR & operator=( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const *>( &rhs );
+ return *this;
}
- InitializePerformanceApiInfoINTEL& operator=( VkInitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+ DisplaySurfaceCreateInfoKHR & operator=( DisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DisplaySurfaceCreateInfoKHR ) );
return *this;
}
- InitializePerformanceApiInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ DisplaySurfaceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- InitializePerformanceApiInfoINTEL & setPUserData( void* pUserData_ ) VULKAN_HPP_NOEXCEPT
+ DisplaySurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
- pUserData = pUserData_;
+ flags = flags_;
return *this;
}
- operator VkInitializePerformanceApiInfoINTEL const&() const VULKAN_HPP_NOEXCEPT
+ DisplaySurfaceCreateInfoKHR & setDisplayMode( VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkInitializePerformanceApiInfoINTEL*>( this );
+ displayMode = displayMode_;
+ return *this;
}
- operator VkInitializePerformanceApiInfoINTEL &() VULKAN_HPP_NOEXCEPT
+ DisplaySurfaceCreateInfoKHR & setPlaneIndex( uint32_t planeIndex_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkInitializePerformanceApiInfoINTEL*>( this );
+ planeIndex = planeIndex_;
+ return *this;
}
- bool operator==( InitializePerformanceApiInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+ DisplaySurfaceCreateInfoKHR & setPlaneStackIndex( uint32_t planeStackIndex_ ) VULKAN_HPP_NOEXCEPT
+ {
+ planeStackIndex = planeStackIndex_;
+ return *this;
+ }
+
+ DisplaySurfaceCreateInfoKHR & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT
+ {
+ transform = transform_;
+ return *this;
+ }
+
+ DisplaySurfaceCreateInfoKHR & setGlobalAlpha( float globalAlpha_ ) VULKAN_HPP_NOEXCEPT
+ {
+ globalAlpha = globalAlpha_;
+ return *this;
+ }
+
+ DisplaySurfaceCreateInfoKHR & setAlphaMode( VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ ) VULKAN_HPP_NOEXCEPT
+ {
+ alphaMode = alphaMode_;
+ return *this;
+ }
+
+ DisplaySurfaceCreateInfoKHR & setImageExtent( VULKAN_HPP_NAMESPACE::Extent2D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
+ {
+ imageExtent = imageExtent_;
+ return *this;
+ }
+
+
+ operator VkDisplaySurfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR*>( this );
+ }
+
+ operator VkDisplaySurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkDisplaySurfaceCreateInfoKHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DisplaySurfaceCreateInfoKHR const& ) const = default;
+#else
+ bool operator==( DisplaySurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( pUserData == rhs.pUserData );
+ && ( flags == rhs.flags )
+ && ( displayMode == rhs.displayMode )
+ && ( planeIndex == rhs.planeIndex )
+ && ( planeStackIndex == rhs.planeStackIndex )
+ && ( transform == rhs.transform )
+ && ( globalAlpha == rhs.globalAlpha )
+ && ( alphaMode == rhs.alphaMode )
+ && ( imageExtent == rhs.imageExtent );
}
- bool operator!=( InitializePerformanceApiInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( DisplaySurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eInitializePerformanceApiInfoINTEL;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplaySurfaceCreateInfoKHR;
const void* pNext = {};
- void* pUserData = {};
+ VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags = {};
+ VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode = {};
+ uint32_t planeIndex = {};
+ uint32_t planeStackIndex = {};
+ VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
+ float globalAlpha = {};
+ VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode = VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque;
+ VULKAN_HPP_NAMESPACE::Extent2D imageExtent = {};
+
};
- static_assert( sizeof( InitializePerformanceApiInfoINTEL ) == sizeof( VkInitializePerformanceApiInfoINTEL ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<InitializePerformanceApiInfoINTEL>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( DisplaySurfaceCreateInfoKHR ) == sizeof( VkDisplaySurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<DisplaySurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
- struct InputAttachmentAspectReference
+ template <>
+ struct CppType<StructureType, StructureType::eDisplaySurfaceCreateInfoKHR>
{
- VULKAN_HPP_CONSTEXPR InputAttachmentAspectReference( uint32_t subpass_ = {},
- uint32_t inputAttachmentIndex_ = {},
- VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {} ) VULKAN_HPP_NOEXCEPT
- : subpass( subpass_ )
- , inputAttachmentIndex( inputAttachmentIndex_ )
- , aspectMask( aspectMask_ )
+ using Type = DisplaySurfaceCreateInfoKHR;
+ };
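// --- Illustrative sketch (editor-added; not part of this diff): filling the
// DisplaySurfaceCreateInfoKHR wrapper above and creating a VK_KHR_display surface
// from it. The display mode, plane index and extent are placeholder arguments.
// #include <vulkan/vulkan.hpp>
//
// vk::SurfaceKHR createDisplaySurface( vk::Instance instance, vk::DisplayModeKHR mode,
//                                      uint32_t planeIndex, vk::Extent2D extent )
// {
//   vk::DisplaySurfaceCreateInfoKHR createInfo =
//       vk::DisplaySurfaceCreateInfoKHR{}
//           .setDisplayMode( mode )
//           .setPlaneIndex( planeIndex )
//           .setTransform( vk::SurfaceTransformFlagBitsKHR::eIdentity )
//           .setAlphaMode( vk::DisplayPlaneAlphaFlagBitsKHR::eOpaque )
//           .setImageExtent( extent );
//   return instance.createDisplayPlaneSurfaceKHR( createInfo );
// }
// --- end sketch ---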
+
+ struct DrawIndexedIndirectCommand
+ {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DrawIndexedIndirectCommand(uint32_t indexCount_ = {}, uint32_t instanceCount_ = {}, uint32_t firstIndex_ = {}, int32_t vertexOffset_ = {}, uint32_t firstInstance_ = {}) VULKAN_HPP_NOEXCEPT
+ : indexCount( indexCount_ ), instanceCount( instanceCount_ ), firstIndex( firstIndex_ ), vertexOffset( vertexOffset_ ), firstInstance( firstInstance_ )
{}
- InputAttachmentAspectReference( VkInputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR DrawIndexedIndirectCommand( DrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DrawIndexedIndirectCommand( VkDrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- InputAttachmentAspectReference& operator=( VkInputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT
+ DrawIndexedIndirectCommand & operator=( VkDrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrawIndexedIndirectCommand const *>( &rhs );
return *this;
}
- InputAttachmentAspectReference & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT
+ DrawIndexedIndirectCommand & operator=( DrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
{
- subpass = subpass_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DrawIndexedIndirectCommand ) );
return *this;
}
- InputAttachmentAspectReference & setInputAttachmentIndex( uint32_t inputAttachmentIndex_ ) VULKAN_HPP_NOEXCEPT
+ DrawIndexedIndirectCommand & setIndexCount( uint32_t indexCount_ ) VULKAN_HPP_NOEXCEPT
{
- inputAttachmentIndex = inputAttachmentIndex_;
+ indexCount = indexCount_;
return *this;
}
- InputAttachmentAspectReference & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
+ DrawIndexedIndirectCommand & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT
{
- aspectMask = aspectMask_;
+ instanceCount = instanceCount_;
return *this;
}
- operator VkInputAttachmentAspectReference const&() const VULKAN_HPP_NOEXCEPT
+ DrawIndexedIndirectCommand & setFirstIndex( uint32_t firstIndex_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkInputAttachmentAspectReference*>( this );
+ firstIndex = firstIndex_;
+ return *this;
}
- operator VkInputAttachmentAspectReference &() VULKAN_HPP_NOEXCEPT
+ DrawIndexedIndirectCommand & setVertexOffset( int32_t vertexOffset_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkInputAttachmentAspectReference*>( this );
+ vertexOffset = vertexOffset_;
+ return *this;
}
- bool operator==( InputAttachmentAspectReference const& rhs ) const VULKAN_HPP_NOEXCEPT
+ DrawIndexedIndirectCommand & setFirstInstance( uint32_t firstInstance_ ) VULKAN_HPP_NOEXCEPT
{
- return ( subpass == rhs.subpass )
- && ( inputAttachmentIndex == rhs.inputAttachmentIndex )
- && ( aspectMask == rhs.aspectMask );
+ firstInstance = firstInstance_;
+ return *this;
}
- bool operator!=( InputAttachmentAspectReference const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+ operator VkDrawIndexedIndirectCommand const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkDrawIndexedIndirectCommand*>( this );
+ }
+
+ operator VkDrawIndexedIndirectCommand &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkDrawIndexedIndirectCommand*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DrawIndexedIndirectCommand const& ) const = default;
+#else
+ bool operator==( DrawIndexedIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( indexCount == rhs.indexCount )
+ && ( instanceCount == rhs.instanceCount )
+ && ( firstIndex == rhs.firstIndex )
+ && ( vertexOffset == rhs.vertexOffset )
+ && ( firstInstance == rhs.firstInstance );
+ }
+
+ bool operator!=( DrawIndexedIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- uint32_t subpass = {};
- uint32_t inputAttachmentIndex = {};
- VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
+ uint32_t indexCount = {};
+ uint32_t instanceCount = {};
+ uint32_t firstIndex = {};
+ int32_t vertexOffset = {};
+ uint32_t firstInstance = {};
+
};
- static_assert( sizeof( InputAttachmentAspectReference ) == sizeof( VkInputAttachmentAspectReference ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<InputAttachmentAspectReference>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( DrawIndexedIndirectCommand ) == sizeof( VkDrawIndexedIndirectCommand ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<DrawIndexedIndirectCommand>::value, "struct wrapper is not a standard layout!" );
- struct InstanceCreateInfo
+ struct DrawIndirectCommand
{
- VULKAN_HPP_CONSTEXPR InstanceCreateInfo( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_ = {},
- const VULKAN_HPP_NAMESPACE::ApplicationInfo* pApplicationInfo_ = {},
- uint32_t enabledLayerCount_ = {},
- const char* const* ppEnabledLayerNames_ = {},
- uint32_t enabledExtensionCount_ = {},
- const char* const* ppEnabledExtensionNames_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , pApplicationInfo( pApplicationInfo_ )
- , enabledLayerCount( enabledLayerCount_ )
- , ppEnabledLayerNames( ppEnabledLayerNames_ )
- , enabledExtensionCount( enabledExtensionCount_ )
- , ppEnabledExtensionNames( ppEnabledExtensionNames_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DrawIndirectCommand(uint32_t vertexCount_ = {}, uint32_t instanceCount_ = {}, uint32_t firstVertex_ = {}, uint32_t firstInstance_ = {}) VULKAN_HPP_NOEXCEPT
+ : vertexCount( vertexCount_ ), instanceCount( instanceCount_ ), firstVertex( firstVertex_ ), firstInstance( firstInstance_ )
{}
- VULKAN_HPP_NAMESPACE::InstanceCreateInfo & operator=( VULKAN_HPP_NAMESPACE::InstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR DrawIndirectCommand( DrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DrawIndirectCommand( VkDrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::InstanceCreateInfo ) - offsetof( InstanceCreateInfo, pNext ) );
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ DrawIndirectCommand & operator=( VkDrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrawIndirectCommand const *>( &rhs );
return *this;
}
- InstanceCreateInfo( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ DrawIndirectCommand & operator=( DrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DrawIndirectCommand ) );
+ return *this;
}
- InstanceCreateInfo& operator=( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ DrawIndirectCommand & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::InstanceCreateInfo const *>(&rhs);
+ vertexCount = vertexCount_;
return *this;
}
- InstanceCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ DrawIndirectCommand & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ instanceCount = instanceCount_;
return *this;
}
- InstanceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ DrawIndirectCommand & setFirstVertex( uint32_t firstVertex_ ) VULKAN_HPP_NOEXCEPT
{
- flags = flags_;
+ firstVertex = firstVertex_;
return *this;
}
- InstanceCreateInfo & setPApplicationInfo( const VULKAN_HPP_NAMESPACE::ApplicationInfo* pApplicationInfo_ ) VULKAN_HPP_NOEXCEPT
+ DrawIndirectCommand & setFirstInstance( uint32_t firstInstance_ ) VULKAN_HPP_NOEXCEPT
{
- pApplicationInfo = pApplicationInfo_;
+ firstInstance = firstInstance_;
return *this;
}
- InstanceCreateInfo & setEnabledLayerCount( uint32_t enabledLayerCount_ ) VULKAN_HPP_NOEXCEPT
+
+ operator VkDrawIndirectCommand const&() const VULKAN_HPP_NOEXCEPT
{
- enabledLayerCount = enabledLayerCount_;
+ return *reinterpret_cast<const VkDrawIndirectCommand*>( this );
+ }
+
+ operator VkDrawIndirectCommand &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkDrawIndirectCommand*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DrawIndirectCommand const& ) const = default;
+#else
+ bool operator==( DrawIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( vertexCount == rhs.vertexCount )
+ && ( instanceCount == rhs.instanceCount )
+ && ( firstVertex == rhs.firstVertex )
+ && ( firstInstance == rhs.firstInstance );
+ }
+
+ bool operator!=( DrawIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ uint32_t vertexCount = {};
+ uint32_t instanceCount = {};
+ uint32_t firstVertex = {};
+ uint32_t firstInstance = {};
+
+ };
+ static_assert( sizeof( DrawIndirectCommand ) == sizeof( VkDrawIndirectCommand ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<DrawIndirectCommand>::value, "struct wrapper is not a standard layout!" );
+
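  // Illustrative usage sketch (editorial, not part of the upstream header): because the wrapper and
  // VkDrawIndirectCommand share the same size and layout, sizeof( vk::DrawIndirectCommand ) is a
  // valid stride for vkCmdDrawIndirect. The command buffer, buffer handle and draw count are
  // assumptions; assumes #include <vulkan/vulkan.hpp>.
  void recordIndirectDraws( VkCommandBuffer cmd, VkBuffer indirectBuffer, uint32_t drawCount )
  {
    vkCmdDrawIndirect( cmd, indirectBuffer, /*offset*/ 0, drawCount, sizeof( vk::DrawIndirectCommand ) );
  }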
+ struct DrawMeshTasksIndirectCommandNV
+ {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandNV(uint32_t taskCount_ = {}, uint32_t firstTask_ = {}) VULKAN_HPP_NOEXCEPT
+ : taskCount( taskCount_ ), firstTask( firstTask_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandNV( DrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DrawMeshTasksIndirectCommandNV( VkDrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ DrawMeshTasksIndirectCommandNV & operator=( VkDrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandNV const *>( &rhs );
return *this;
}
- InstanceCreateInfo & setPpEnabledLayerNames( const char* const* ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
+ DrawMeshTasksIndirectCommandNV & operator=( DrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- ppEnabledLayerNames = ppEnabledLayerNames_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DrawMeshTasksIndirectCommandNV ) );
return *this;
}
- InstanceCreateInfo & setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) VULKAN_HPP_NOEXCEPT
+ DrawMeshTasksIndirectCommandNV & setTaskCount( uint32_t taskCount_ ) VULKAN_HPP_NOEXCEPT
{
- enabledExtensionCount = enabledExtensionCount_;
+ taskCount = taskCount_;
return *this;
}
- InstanceCreateInfo & setPpEnabledExtensionNames( const char* const* ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
+ DrawMeshTasksIndirectCommandNV & setFirstTask( uint32_t firstTask_ ) VULKAN_HPP_NOEXCEPT
{
- ppEnabledExtensionNames = ppEnabledExtensionNames_;
+ firstTask = firstTask_;
return *this;
}
- operator VkInstanceCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkDrawMeshTasksIndirectCommandNV const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkInstanceCreateInfo*>( this );
+ return *reinterpret_cast<const VkDrawMeshTasksIndirectCommandNV*>( this );
}
- operator VkInstanceCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkDrawMeshTasksIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkInstanceCreateInfo*>( this );
+ return *reinterpret_cast<VkDrawMeshTasksIndirectCommandNV*>( this );
}
- bool operator==( InstanceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DrawMeshTasksIndirectCommandNV const& ) const = default;
+#else
+ bool operator==( DrawMeshTasksIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( flags == rhs.flags )
- && ( pApplicationInfo == rhs.pApplicationInfo )
- && ( enabledLayerCount == rhs.enabledLayerCount )
- && ( ppEnabledLayerNames == rhs.ppEnabledLayerNames )
- && ( enabledExtensionCount == rhs.enabledExtensionCount )
- && ( ppEnabledExtensionNames == rhs.ppEnabledExtensionNames );
+ return ( taskCount == rhs.taskCount )
+ && ( firstTask == rhs.firstTask );
}
- bool operator!=( InstanceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( DrawMeshTasksIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eInstanceCreateInfo;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags = {};
- const VULKAN_HPP_NAMESPACE::ApplicationInfo* pApplicationInfo = {};
- uint32_t enabledLayerCount = {};
- const char* const* ppEnabledLayerNames = {};
- uint32_t enabledExtensionCount = {};
- const char* const* ppEnabledExtensionNames = {};
+ uint32_t taskCount = {};
+ uint32_t firstTask = {};
+
};
- static_assert( sizeof( InstanceCreateInfo ) == sizeof( VkInstanceCreateInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<InstanceCreateInfo>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( DrawMeshTasksIndirectCommandNV ) == sizeof( VkDrawMeshTasksIndirectCommandNV ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<DrawMeshTasksIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
- struct LayerProperties
+ struct DrmFormatModifierPropertiesEXT
{
- LayerProperties( std::array<char,VK_MAX_EXTENSION_NAME_SIZE> const& layerName_ = {},
- uint32_t specVersion_ = {},
- uint32_t implementationVersion_ = {},
- std::array<char,VK_MAX_DESCRIPTION_SIZE> const& description_ = {} ) VULKAN_HPP_NOEXCEPT
- : layerName{}
- , specVersion( specVersion_ )
- , implementationVersion( implementationVersion_ )
- , description{}
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesEXT(uint64_t drmFormatModifier_ = {}, uint32_t drmFormatModifierPlaneCount_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures_ = {}) VULKAN_HPP_NOEXCEPT
+ : drmFormatModifier( drmFormatModifier_ ), drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ ), drmFormatModifierTilingFeatures( drmFormatModifierTilingFeatures_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesEXT( DrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DrmFormatModifierPropertiesEXT( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_EXTENSION_NAME_SIZE,VK_MAX_EXTENSION_NAME_SIZE>::copy( layerName, layerName_ );
- VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_DESCRIPTION_SIZE,VK_MAX_DESCRIPTION_SIZE>::copy( description, description_ );
+ *this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- LayerProperties( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ DrmFormatModifierPropertiesEXT & operator=( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT const *>( &rhs );
+ return *this;
}
- LayerProperties& operator=( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ DrmFormatModifierPropertiesEXT & operator=( DrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::LayerProperties const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DrmFormatModifierPropertiesEXT ) );
return *this;
}
- operator VkLayerProperties const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkDrmFormatModifierPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkLayerProperties*>( this );
+ return *reinterpret_cast<const VkDrmFormatModifierPropertiesEXT*>( this );
}
- operator VkLayerProperties &() VULKAN_HPP_NOEXCEPT
+ operator VkDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkLayerProperties*>( this );
+ return *reinterpret_cast<VkDrmFormatModifierPropertiesEXT*>( this );
}
- bool operator==( LayerProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DrmFormatModifierPropertiesEXT const& ) const = default;
+#else
+ bool operator==( DrmFormatModifierPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- return ( memcmp( layerName, rhs.layerName, VK_MAX_EXTENSION_NAME_SIZE * sizeof( char ) ) == 0 )
- && ( specVersion == rhs.specVersion )
- && ( implementationVersion == rhs.implementationVersion )
- && ( memcmp( description, rhs.description, VK_MAX_DESCRIPTION_SIZE * sizeof( char ) ) == 0 );
+ return ( drmFormatModifier == rhs.drmFormatModifier )
+ && ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount )
+ && ( drmFormatModifierTilingFeatures == rhs.drmFormatModifierTilingFeatures );
}
- bool operator!=( LayerProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( DrmFormatModifierPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- char layerName[VK_MAX_EXTENSION_NAME_SIZE] = {};
- uint32_t specVersion = {};
- uint32_t implementationVersion = {};
- char description[VK_MAX_DESCRIPTION_SIZE] = {};
- };
- static_assert( sizeof( LayerProperties ) == sizeof( VkLayerProperties ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<LayerProperties>::value, "struct wrapper is not a standard layout!" );
+ uint64_t drmFormatModifier = {};
+ uint32_t drmFormatModifierPlaneCount = {};
+ VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures = {};
-#ifdef VK_USE_PLATFORM_MACOS_MVK
+ };
+ static_assert( sizeof( DrmFormatModifierPropertiesEXT ) == sizeof( VkDrmFormatModifierPropertiesEXT ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<DrmFormatModifierPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
- struct MacOSSurfaceCreateInfoMVK
+ struct DrmFormatModifierPropertiesListEXT
{
- VULKAN_HPP_CONSTEXPR MacOSSurfaceCreateInfoMVK( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags_ = {},
- const void* pView_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , pView( pView_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDrmFormatModifierPropertiesListEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesListEXT(uint32_t drmFormatModifierCount_ = {}, VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT* pDrmFormatModifierProperties_ = {}) VULKAN_HPP_NOEXCEPT
+ : drmFormatModifierCount( drmFormatModifierCount_ ), pDrmFormatModifierProperties( pDrmFormatModifierProperties_ )
{}
- VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & operator=( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesListEXT( DrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DrmFormatModifierPropertiesListEXT( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK ) - offsetof( MacOSSurfaceCreateInfoMVK, pNext ) );
+ *this = rhs;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ DrmFormatModifierPropertiesListEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT> const & drmFormatModifierProperties_ )
+ : drmFormatModifierCount( static_cast<uint32_t>( drmFormatModifierProperties_.size() ) ), pDrmFormatModifierProperties( drmFormatModifierProperties_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ DrmFormatModifierPropertiesListEXT & operator=( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT const *>( &rhs );
return *this;
}
- MacOSSurfaceCreateInfoMVK( VkMacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
+ DrmFormatModifierPropertiesListEXT & operator=( DrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( DrmFormatModifierPropertiesListEXT ) );
+ return *this;
+ }
+
+
+ operator VkDrmFormatModifierPropertiesListEXT const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkDrmFormatModifierPropertiesListEXT*>( this );
+ }
+
+ operator VkDrmFormatModifierPropertiesListEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkDrmFormatModifierPropertiesListEXT*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DrmFormatModifierPropertiesListEXT const& ) const = default;
+#else
+ bool operator==( DrmFormatModifierPropertiesListEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( drmFormatModifierCount == rhs.drmFormatModifierCount )
+ && ( pDrmFormatModifierProperties == rhs.pDrmFormatModifierProperties );
+ }
+
+ bool operator!=( DrmFormatModifierPropertiesListEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDrmFormatModifierPropertiesListEXT;
+ void* pNext = {};
+ uint32_t drmFormatModifierCount = {};
+ VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT* pDrmFormatModifierProperties = {};
+
+ };
+ static_assert( sizeof( DrmFormatModifierPropertiesListEXT ) == sizeof( VkDrmFormatModifierPropertiesListEXT ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<DrmFormatModifierPropertiesListEXT>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eDrmFormatModifierPropertiesListEXT>
+ {
+ using Type = DrmFormatModifierPropertiesListEXT;
+ };
+
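  // Illustrative usage sketch (editorial, not part of the upstream header): the list struct is a
  // pNext extension of VkFormatProperties2 and follows the usual two-call enumeration pattern.
  // `physicalDevice` and `format` are assumed inputs, and the query requires
  // VK_EXT_image_drm_format_modifier support on the device.
  #include <vector>
  #include <vulkan/vulkan.hpp>

  std::vector<vk::DrmFormatModifierPropertiesEXT> queryDrmModifiers( VkPhysicalDevice physicalDevice, VkFormat format )
  {
    vk::DrmFormatModifierPropertiesListEXT modifierList{};   // sType is pre-set by the wrapper
    VkFormatProperties2 props2{ VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, &modifierList, {} };
    vkGetPhysicalDeviceFormatProperties2( physicalDevice, format, &props2 );   // first call: count only
    std::vector<vk::DrmFormatModifierPropertiesEXT> modifiers( modifierList.drmFormatModifierCount );
    modifierList.pDrmFormatModifierProperties = modifiers.data();
    vkGetPhysicalDeviceFormatProperties2( physicalDevice, format, &props2 );   // second call: fill the array
    return modifiers;
  }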
+ struct ExportFenceCreateInfo
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportFenceCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ExportFenceCreateInfo(VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ = {}) VULKAN_HPP_NOEXCEPT
+ : handleTypes( handleTypes_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR ExportFenceCreateInfo( ExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ExportFenceCreateInfo( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- MacOSSurfaceCreateInfoMVK& operator=( VkMacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
+ ExportFenceCreateInfo & operator=( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo const *>( &rhs );
return *this;
}
- MacOSSurfaceCreateInfoMVK & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ ExportFenceCreateInfo & operator=( ExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ExportFenceCreateInfo ) );
return *this;
}
- MacOSSurfaceCreateInfoMVK & setFlags( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags_ ) VULKAN_HPP_NOEXCEPT
+ ExportFenceCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- flags = flags_;
+ pNext = pNext_;
return *this;
}
- MacOSSurfaceCreateInfoMVK & setPView( const void* pView_ ) VULKAN_HPP_NOEXCEPT
+ ExportFenceCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
{
- pView = pView_;
+ handleTypes = handleTypes_;
return *this;
}
- operator VkMacOSSurfaceCreateInfoMVK const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkExportFenceCreateInfo const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK*>( this );
+ return *reinterpret_cast<const VkExportFenceCreateInfo*>( this );
}
- operator VkMacOSSurfaceCreateInfoMVK &() VULKAN_HPP_NOEXCEPT
+ operator VkExportFenceCreateInfo &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkMacOSSurfaceCreateInfoMVK*>( this );
+ return *reinterpret_cast<VkExportFenceCreateInfo*>( this );
}
- bool operator==( MacOSSurfaceCreateInfoMVK const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ExportFenceCreateInfo const& ) const = default;
+#else
+ bool operator==( ExportFenceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( flags == rhs.flags )
- && ( pView == rhs.pView );
+ && ( handleTypes == rhs.handleTypes );
}
- bool operator!=( MacOSSurfaceCreateInfoMVK const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( ExportFenceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMacosSurfaceCreateInfoMVK;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportFenceCreateInfo;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags = {};
- const void* pView = {};
+ VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes = {};
+
};
- static_assert( sizeof( MacOSSurfaceCreateInfoMVK ) == sizeof( VkMacOSSurfaceCreateInfoMVK ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<MacOSSurfaceCreateInfoMVK>::value, "struct wrapper is not a standard layout!" );
-#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+ static_assert( sizeof( ExportFenceCreateInfo ) == sizeof( VkExportFenceCreateInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ExportFenceCreateInfo>::value, "struct wrapper is not a standard layout!" );
- struct MappedMemoryRange
+ template <>
+ struct CppType<StructureType, StructureType::eExportFenceCreateInfo>
{
- VULKAN_HPP_CONSTEXPR MappedMemoryRange( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT
- : memory( memory_ )
- , offset( offset_ )
- , size( size_ )
+ using Type = ExportFenceCreateInfo;
+ };
+ using ExportFenceCreateInfoKHR = ExportFenceCreateInfo;
+
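  // Illustrative usage sketch (editorial, not part of the upstream header): chain the new
  // ExportFenceCreateInfo into a FenceCreateInfo so the fence payload can later be exported, here as
  // an opaque POSIX fd. The `device` handle is an assumption; assumes #include <vulkan/vulkan.hpp>
  // and the default exceptions-enabled configuration of vulkan.hpp.
  vk::Fence createExportableFence( vk::Device device )
  {
    vk::ExportFenceCreateInfo exportInfo{};
    exportInfo.setHandleTypes( vk::ExternalFenceHandleTypeFlagBits::eOpaqueFd );
    vk::FenceCreateInfo fenceInfo{};
    fenceInfo.setPNext( &exportInfo );
    return device.createFence( fenceInfo );
  }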
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ struct ExportFenceWin32HandleInfoKHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportFenceWin32HandleInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ExportFenceWin32HandleInfoKHR(const SECURITY_ATTRIBUTES* pAttributes_ = {}, DWORD dwAccess_ = {}, LPCWSTR name_ = {}) VULKAN_HPP_NOEXCEPT
+ : pAttributes( pAttributes_ ), dwAccess( dwAccess_ ), name( name_ )
{}
- VULKAN_HPP_NAMESPACE::MappedMemoryRange & operator=( VULKAN_HPP_NAMESPACE::MappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ExportFenceWin32HandleInfoKHR( ExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ExportFenceWin32HandleInfoKHR( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MappedMemoryRange ) - offsetof( MappedMemoryRange, pNext ) );
- return *this;
+ *this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- MappedMemoryRange( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT
+ ExportFenceWin32HandleInfoKHR & operator=( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR const *>( &rhs );
+ return *this;
}
- MappedMemoryRange& operator=( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT
+ ExportFenceWin32HandleInfoKHR & operator=( ExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MappedMemoryRange const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ExportFenceWin32HandleInfoKHR ) );
return *this;
}
- MappedMemoryRange & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ ExportFenceWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- MappedMemoryRange & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+ ExportFenceWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) VULKAN_HPP_NOEXCEPT
{
- memory = memory_;
+ pAttributes = pAttributes_;
return *this;
}
- MappedMemoryRange & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+ ExportFenceWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT
{
- offset = offset_;
+ dwAccess = dwAccess_;
return *this;
}
- MappedMemoryRange & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
+ ExportFenceWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
{
- size = size_;
+ name = name_;
return *this;
}
- operator VkMappedMemoryRange const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkExportFenceWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkMappedMemoryRange*>( this );
+ return *reinterpret_cast<const VkExportFenceWin32HandleInfoKHR*>( this );
}
- operator VkMappedMemoryRange &() VULKAN_HPP_NOEXCEPT
+ operator VkExportFenceWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkMappedMemoryRange*>( this );
+ return *reinterpret_cast<VkExportFenceWin32HandleInfoKHR*>( this );
}
- bool operator==( MappedMemoryRange const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ExportFenceWin32HandleInfoKHR const& ) const = default;
+#else
+ bool operator==( ExportFenceWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( memory == rhs.memory )
- && ( offset == rhs.offset )
- && ( size == rhs.size );
+ && ( pAttributes == rhs.pAttributes )
+ && ( dwAccess == rhs.dwAccess )
+ && ( name == rhs.name );
}
- bool operator!=( MappedMemoryRange const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( ExportFenceWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMappedMemoryRange;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportFenceWin32HandleInfoKHR;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
- VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
- VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+ const SECURITY_ATTRIBUTES* pAttributes = {};
+ DWORD dwAccess = {};
+ LPCWSTR name = {};
+
};
- static_assert( sizeof( MappedMemoryRange ) == sizeof( VkMappedMemoryRange ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<MappedMemoryRange>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( ExportFenceWin32HandleInfoKHR ) == sizeof( VkExportFenceWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ExportFenceWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
- struct MemoryAllocateFlagsInfo
+ template <>
+ struct CppType<StructureType, StructureType::eExportFenceWin32HandleInfoKHR>
+ {
+ using Type = ExportFenceWin32HandleInfoKHR;
+ };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+ struct ExportMemoryAllocateInfo
{
- VULKAN_HPP_CONSTEXPR MemoryAllocateFlagsInfo( VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags_ = {},
- uint32_t deviceMask_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , deviceMask( deviceMask_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryAllocateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {}) VULKAN_HPP_NOEXCEPT
+ : handleTypes( handleTypes_ )
{}
- VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo & operator=( VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo ) - offsetof( MemoryAllocateFlagsInfo, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo( ExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- MemoryAllocateFlagsInfo( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ ExportMemoryAllocateInfo( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- MemoryAllocateFlagsInfo& operator=( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ ExportMemoryAllocateInfo & operator=( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo const *>( &rhs );
return *this;
}
- MemoryAllocateFlagsInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ ExportMemoryAllocateInfo & operator=( ExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ExportMemoryAllocateInfo ) );
return *this;
}
- MemoryAllocateFlagsInfo & setFlags( VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ ExportMemoryAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- flags = flags_;
+ pNext = pNext_;
return *this;
}
- MemoryAllocateFlagsInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
+ ExportMemoryAllocateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
{
- deviceMask = deviceMask_;
+ handleTypes = handleTypes_;
return *this;
}
- operator VkMemoryAllocateFlagsInfo const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkExportMemoryAllocateInfo const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkMemoryAllocateFlagsInfo*>( this );
+ return *reinterpret_cast<const VkExportMemoryAllocateInfo*>( this );
}
- operator VkMemoryAllocateFlagsInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkExportMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkMemoryAllocateFlagsInfo*>( this );
+ return *reinterpret_cast<VkExportMemoryAllocateInfo*>( this );
}
- bool operator==( MemoryAllocateFlagsInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ExportMemoryAllocateInfo const& ) const = default;
+#else
+ bool operator==( ExportMemoryAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( flags == rhs.flags )
- && ( deviceMask == rhs.deviceMask );
+ && ( handleTypes == rhs.handleTypes );
}
- bool operator!=( MemoryAllocateFlagsInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( ExportMemoryAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryAllocateFlagsInfo;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryAllocateInfo;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags = {};
- uint32_t deviceMask = {};
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {};
+
};
- static_assert( sizeof( MemoryAllocateFlagsInfo ) == sizeof( VkMemoryAllocateFlagsInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<MemoryAllocateFlagsInfo>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( ExportMemoryAllocateInfo ) == sizeof( VkExportMemoryAllocateInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ExportMemoryAllocateInfo>::value, "struct wrapper is not a standard layout!" );
- struct MemoryAllocateInfo
+ template <>
+ struct CppType<StructureType, StructureType::eExportMemoryAllocateInfo>
{
- VULKAN_HPP_CONSTEXPR MemoryAllocateInfo( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {},
- uint32_t memoryTypeIndex_ = {} ) VULKAN_HPP_NOEXCEPT
- : allocationSize( allocationSize_ )
- , memoryTypeIndex( memoryTypeIndex_ )
+ using Type = ExportMemoryAllocateInfo;
+ };
+ using ExportMemoryAllocateInfoKHR = ExportMemoryAllocateInfo;
+
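  // Illustrative usage sketch (editorial, not part of the upstream header): the same pNext pattern
  // applies to memory allocations - chain ExportMemoryAllocateInfo into MemoryAllocateInfo so the
  // allocation can later be exported (e.g. as an opaque fd). `device`, `size` and `memoryTypeIndex`
  // are assumptions; assumes #include <vulkan/vulkan.hpp>.
  vk::DeviceMemory allocateExportableMemory( vk::Device device, vk::DeviceSize size, uint32_t memoryTypeIndex )
  {
    vk::ExportMemoryAllocateInfo exportAlloc{};
    exportAlloc.setHandleTypes( vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
    vk::MemoryAllocateInfo allocInfo{};
    allocInfo.setAllocationSize( size ).setMemoryTypeIndex( memoryTypeIndex ).setPNext( &exportAlloc );
    return device.allocateMemory( allocInfo );
  }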
+ struct ExportMemoryAllocateInfoNV
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryAllocateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfoNV(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {}) VULKAN_HPP_NOEXCEPT
+ : handleTypes( handleTypes_ )
{}
- VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & operator=( VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryAllocateInfo ) - offsetof( MemoryAllocateInfo, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfoNV( ExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- MemoryAllocateInfo( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ ExportMemoryAllocateInfoNV( VkExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- MemoryAllocateInfo& operator=( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ ExportMemoryAllocateInfoNV & operator=( VkExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV const *>( &rhs );
return *this;
}
- MemoryAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ ExportMemoryAllocateInfoNV & operator=( ExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ExportMemoryAllocateInfoNV ) );
return *this;
}
- MemoryAllocateInfo & setAllocationSize( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ ) VULKAN_HPP_NOEXCEPT
+ ExportMemoryAllocateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- allocationSize = allocationSize_;
+ pNext = pNext_;
return *this;
}
- MemoryAllocateInfo & setMemoryTypeIndex( uint32_t memoryTypeIndex_ ) VULKAN_HPP_NOEXCEPT
+ ExportMemoryAllocateInfoNV & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT
{
- memoryTypeIndex = memoryTypeIndex_;
+ handleTypes = handleTypes_;
return *this;
}
- operator VkMemoryAllocateInfo const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkExportMemoryAllocateInfoNV const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkMemoryAllocateInfo*>( this );
+ return *reinterpret_cast<const VkExportMemoryAllocateInfoNV*>( this );
}
- operator VkMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkExportMemoryAllocateInfoNV &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkMemoryAllocateInfo*>( this );
+ return *reinterpret_cast<VkExportMemoryAllocateInfoNV*>( this );
}
- bool operator==( MemoryAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ExportMemoryAllocateInfoNV const& ) const = default;
+#else
+ bool operator==( ExportMemoryAllocateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( allocationSize == rhs.allocationSize )
- && ( memoryTypeIndex == rhs.memoryTypeIndex );
+ && ( handleTypes == rhs.handleTypes );
}
- bool operator!=( MemoryAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( ExportMemoryAllocateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryAllocateInfo;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryAllocateInfoNV;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::DeviceSize allocationSize = {};
- uint32_t memoryTypeIndex = {};
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes = {};
+
};
- static_assert( sizeof( MemoryAllocateInfo ) == sizeof( VkMemoryAllocateInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<MemoryAllocateInfo>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( ExportMemoryAllocateInfoNV ) == sizeof( VkExportMemoryAllocateInfoNV ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ExportMemoryAllocateInfoNV>::value, "struct wrapper is not a standard layout!" );
- struct MemoryBarrier
+ template <>
+ struct CppType<StructureType, StructureType::eExportMemoryAllocateInfoNV>
+ {
+ using Type = ExportMemoryAllocateInfoNV;
+ };
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ struct ExportMemoryWin32HandleInfoKHR
{
- VULKAN_HPP_CONSTEXPR MemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {},
- VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {} ) VULKAN_HPP_NOEXCEPT
- : srcAccessMask( srcAccessMask_ )
- , dstAccessMask( dstAccessMask_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryWin32HandleInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoKHR(const SECURITY_ATTRIBUTES* pAttributes_ = {}, DWORD dwAccess_ = {}, LPCWSTR name_ = {}) VULKAN_HPP_NOEXCEPT
+ : pAttributes( pAttributes_ ), dwAccess( dwAccess_ ), name( name_ )
{}
- VULKAN_HPP_NAMESPACE::MemoryBarrier & operator=( VULKAN_HPP_NAMESPACE::MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoKHR( ExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ExportMemoryWin32HandleInfoKHR( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryBarrier ) - offsetof( MemoryBarrier, pNext ) );
- return *this;
+ *this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- MemoryBarrier( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
+ ExportMemoryWin32HandleInfoKHR & operator=( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR const *>( &rhs );
+ return *this;
}
- MemoryBarrier& operator=( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
+ ExportMemoryWin32HandleInfoKHR & operator=( ExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryBarrier const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ExportMemoryWin32HandleInfoKHR ) );
return *this;
}
- MemoryBarrier & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ ExportMemoryWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- MemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
+ ExportMemoryWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) VULKAN_HPP_NOEXCEPT
{
- srcAccessMask = srcAccessMask_;
+ pAttributes = pAttributes_;
return *this;
}
- MemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
+ ExportMemoryWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT
{
- dstAccessMask = dstAccessMask_;
+ dwAccess = dwAccess_;
return *this;
}
- operator VkMemoryBarrier const&() const VULKAN_HPP_NOEXCEPT
+ ExportMemoryWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkMemoryBarrier*>( this );
+ name = name_;
+ return *this;
}
- operator VkMemoryBarrier &() VULKAN_HPP_NOEXCEPT
+
+ operator VkExportMemoryWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkMemoryBarrier*>( this );
+ return *reinterpret_cast<const VkExportMemoryWin32HandleInfoKHR*>( this );
}
- bool operator==( MemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT
+ operator VkExportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkExportMemoryWin32HandleInfoKHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ExportMemoryWin32HandleInfoKHR const& ) const = default;
+#else
+ bool operator==( ExportMemoryWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( srcAccessMask == rhs.srcAccessMask )
- && ( dstAccessMask == rhs.dstAccessMask );
+ && ( pAttributes == rhs.pAttributes )
+ && ( dwAccess == rhs.dwAccess )
+ && ( name == rhs.name );
}
- bool operator!=( MemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( ExportMemoryWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryBarrier;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryWin32HandleInfoKHR;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
- VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
+ const SECURITY_ATTRIBUTES* pAttributes = {};
+ DWORD dwAccess = {};
+ LPCWSTR name = {};
+
};
- static_assert( sizeof( MemoryBarrier ) == sizeof( VkMemoryBarrier ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<MemoryBarrier>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( ExportMemoryWin32HandleInfoKHR ) == sizeof( VkExportMemoryWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ExportMemoryWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
- struct MemoryDedicatedAllocateInfo
+ template <>
+ struct CppType<StructureType, StructureType::eExportMemoryWin32HandleInfoKHR>
+ {
+ using Type = ExportMemoryWin32HandleInfoKHR;
+ };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ struct ExportMemoryWin32HandleInfoNV
{
- VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfo( VULKAN_HPP_NAMESPACE::Image image_ = {},
- VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) VULKAN_HPP_NOEXCEPT
- : image( image_ )
- , buffer( buffer_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryWin32HandleInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoNV(const SECURITY_ATTRIBUTES* pAttributes_ = {}, DWORD dwAccess_ = {}) VULKAN_HPP_NOEXCEPT
+ : pAttributes( pAttributes_ ), dwAccess( dwAccess_ )
{}
- VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo & operator=( VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoNV( ExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ExportMemoryWin32HandleInfoNV( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo ) - offsetof( MemoryDedicatedAllocateInfo, pNext ) );
- return *this;
+ *this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- MemoryDedicatedAllocateInfo( VkMemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ ExportMemoryWin32HandleInfoNV & operator=( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV const *>( &rhs );
+ return *this;
}
- MemoryDedicatedAllocateInfo& operator=( VkMemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ ExportMemoryWin32HandleInfoNV & operator=( ExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ExportMemoryWin32HandleInfoNV ) );
return *this;
}
- MemoryDedicatedAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ ExportMemoryWin32HandleInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- MemoryDedicatedAllocateInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+ ExportMemoryWin32HandleInfoNV & setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) VULKAN_HPP_NOEXCEPT
{
- image = image_;
+ pAttributes = pAttributes_;
return *this;
}
- MemoryDedicatedAllocateInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+ ExportMemoryWin32HandleInfoNV & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT
{
- buffer = buffer_;
+ dwAccess = dwAccess_;
return *this;
}
- operator VkMemoryDedicatedAllocateInfo const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkExportMemoryWin32HandleInfoNV const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkMemoryDedicatedAllocateInfo*>( this );
+ return *reinterpret_cast<const VkExportMemoryWin32HandleInfoNV*>( this );
}
- operator VkMemoryDedicatedAllocateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkExportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkMemoryDedicatedAllocateInfo*>( this );
+ return *reinterpret_cast<VkExportMemoryWin32HandleInfoNV*>( this );
}
- bool operator==( MemoryDedicatedAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ExportMemoryWin32HandleInfoNV const& ) const = default;
+#else
+ bool operator==( ExportMemoryWin32HandleInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( image == rhs.image )
- && ( buffer == rhs.buffer );
+ && ( pAttributes == rhs.pAttributes )
+ && ( dwAccess == rhs.dwAccess );
}
- bool operator!=( MemoryDedicatedAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( ExportMemoryWin32HandleInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryDedicatedAllocateInfo;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryWin32HandleInfoNV;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::Image image = {};
- VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+ const SECURITY_ATTRIBUTES* pAttributes = {};
+ DWORD dwAccess = {};
+
};
- static_assert( sizeof( MemoryDedicatedAllocateInfo ) == sizeof( VkMemoryDedicatedAllocateInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<MemoryDedicatedAllocateInfo>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( ExportMemoryWin32HandleInfoNV ) == sizeof( VkExportMemoryWin32HandleInfoNV ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ExportMemoryWin32HandleInfoNV>::value, "struct wrapper is not a standard layout!" );
- struct MemoryDedicatedRequirements
+ template <>
+ struct CppType<StructureType, StructureType::eExportMemoryWin32HandleInfoNV>
+ {
+ using Type = ExportMemoryWin32HandleInfoNV;
+ };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+ struct ExportSemaphoreCreateInfo
{
- MemoryDedicatedRequirements( VULKAN_HPP_NAMESPACE::Bool32 prefersDedicatedAllocation_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 requiresDedicatedAllocation_ = {} ) VULKAN_HPP_NOEXCEPT
- : prefersDedicatedAllocation( prefersDedicatedAllocation_ )
- , requiresDedicatedAllocation( requiresDedicatedAllocation_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportSemaphoreCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ExportSemaphoreCreateInfo(VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ = {}) VULKAN_HPP_NOEXCEPT
+ : handleTypes( handleTypes_ )
{}
- VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements & operator=( VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ExportSemaphoreCreateInfo( ExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ExportSemaphoreCreateInfo( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ ExportSemaphoreCreateInfo & operator=( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements ) - offsetof( MemoryDedicatedRequirements, pNext ) );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo const *>( &rhs );
return *this;
}
- MemoryDedicatedRequirements( VkMemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+ ExportSemaphoreCreateInfo & operator=( ExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ExportSemaphoreCreateInfo ) );
+ return *this;
}
- MemoryDedicatedRequirements& operator=( VkMemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+ ExportSemaphoreCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements const *>(&rhs);
+ pNext = pNext_;
return *this;
}
- operator VkMemoryDedicatedRequirements const&() const VULKAN_HPP_NOEXCEPT
+ ExportSemaphoreCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkMemoryDedicatedRequirements*>( this );
+ handleTypes = handleTypes_;
+ return *this;
}
- operator VkMemoryDedicatedRequirements &() VULKAN_HPP_NOEXCEPT
+
+ operator VkExportSemaphoreCreateInfo const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkMemoryDedicatedRequirements*>( this );
+ return *reinterpret_cast<const VkExportSemaphoreCreateInfo*>( this );
}
- bool operator==( MemoryDedicatedRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT
+ operator VkExportSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkExportSemaphoreCreateInfo*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ExportSemaphoreCreateInfo const& ) const = default;
+#else
+ bool operator==( ExportSemaphoreCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( prefersDedicatedAllocation == rhs.prefersDedicatedAllocation )
- && ( requiresDedicatedAllocation == rhs.requiresDedicatedAllocation );
+ && ( handleTypes == rhs.handleTypes );
}
- bool operator!=( MemoryDedicatedRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( ExportSemaphoreCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryDedicatedRequirements;
- void* pNext = {};
- VULKAN_HPP_NAMESPACE::Bool32 prefersDedicatedAllocation = {};
- VULKAN_HPP_NAMESPACE::Bool32 requiresDedicatedAllocation = {};
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportSemaphoreCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes = {};
+
};
- static_assert( sizeof( MemoryDedicatedRequirements ) == sizeof( VkMemoryDedicatedRequirements ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<MemoryDedicatedRequirements>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( ExportSemaphoreCreateInfo ) == sizeof( VkExportSemaphoreCreateInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ExportSemaphoreCreateInfo>::value, "struct wrapper is not a standard layout!" );
- struct MemoryFdPropertiesKHR
+ template <>
+ struct CppType<StructureType, StructureType::eExportSemaphoreCreateInfo>
+ {
+ using Type = ExportSemaphoreCreateInfo;
+ };
+ using ExportSemaphoreCreateInfoKHR = ExportSemaphoreCreateInfo;
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ struct ExportSemaphoreWin32HandleInfoKHR
{
- MemoryFdPropertiesKHR( uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT
- : memoryTypeBits( memoryTypeBits_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportSemaphoreWin32HandleInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ExportSemaphoreWin32HandleInfoKHR(const SECURITY_ATTRIBUTES* pAttributes_ = {}, DWORD dwAccess_ = {}, LPCWSTR name_ = {}) VULKAN_HPP_NOEXCEPT
+ : pAttributes( pAttributes_ ), dwAccess( dwAccess_ ), name( name_ )
{}
- VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR & operator=( VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ExportSemaphoreWin32HandleInfoKHR( ExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ExportSemaphoreWin32HandleInfoKHR( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ ExportSemaphoreWin32HandleInfoKHR & operator=( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR ) - offsetof( MemoryFdPropertiesKHR, pNext ) );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR const *>( &rhs );
return *this;
}
- MemoryFdPropertiesKHR( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ ExportSemaphoreWin32HandleInfoKHR & operator=( ExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ExportSemaphoreWin32HandleInfoKHR ) );
+ return *this;
}
- MemoryFdPropertiesKHR& operator=( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ ExportSemaphoreWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR const *>(&rhs);
+ pNext = pNext_;
return *this;
}
- operator VkMemoryFdPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
+ ExportSemaphoreWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkMemoryFdPropertiesKHR*>( this );
+ pAttributes = pAttributes_;
+ return *this;
}
- operator VkMemoryFdPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+ ExportSemaphoreWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkMemoryFdPropertiesKHR*>( this );
+ dwAccess = dwAccess_;
+ return *this;
}
- bool operator==( MemoryFdPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ ExportSemaphoreWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
+ {
+ name = name_;
+ return *this;
+ }
+
+
+ operator VkExportSemaphoreWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkExportSemaphoreWin32HandleInfoKHR*>( this );
+ }
+
+ operator VkExportSemaphoreWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkExportSemaphoreWin32HandleInfoKHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ExportSemaphoreWin32HandleInfoKHR const& ) const = default;
+#else
+ bool operator==( ExportSemaphoreWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( memoryTypeBits == rhs.memoryTypeBits );
+ && ( pAttributes == rhs.pAttributes )
+ && ( dwAccess == rhs.dwAccess )
+ && ( name == rhs.name );
}
- bool operator!=( MemoryFdPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( ExportSemaphoreWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryFdPropertiesKHR;
- void* pNext = {};
- uint32_t memoryTypeBits = {};
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportSemaphoreWin32HandleInfoKHR;
+ const void* pNext = {};
+ const SECURITY_ATTRIBUTES* pAttributes = {};
+ DWORD dwAccess = {};
+ LPCWSTR name = {};
+
};
- static_assert( sizeof( MemoryFdPropertiesKHR ) == sizeof( VkMemoryFdPropertiesKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<MemoryFdPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( ExportSemaphoreWin32HandleInfoKHR ) == sizeof( VkExportSemaphoreWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ExportSemaphoreWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
+ template <>
+ struct CppType<StructureType, StructureType::eExportSemaphoreWin32HandleInfoKHR>
+ {
+ using Type = ExportSemaphoreWin32HandleInfoKHR;
+ };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- struct MemoryGetAndroidHardwareBufferInfoANDROID
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+ struct ExternalFormatANDROID
{
- VULKAN_HPP_CONSTEXPR MemoryGetAndroidHardwareBufferInfoANDROID( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {} ) VULKAN_HPP_NOEXCEPT
- : memory( memory_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalFormatANDROID;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ExternalFormatANDROID(uint64_t externalFormat_ = {}) VULKAN_HPP_NOEXCEPT
+ : externalFormat( externalFormat_ )
{}
- VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & operator=( VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ExternalFormatANDROID( ExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ExternalFormatANDROID( VkExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID ) - offsetof( MemoryGetAndroidHardwareBufferInfoANDROID, pNext ) );
- return *this;
+ *this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- MemoryGetAndroidHardwareBufferInfoANDROID( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+ ExternalFormatANDROID & operator=( VkExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalFormatANDROID const *>( &rhs );
+ return *this;
}
- MemoryGetAndroidHardwareBufferInfoANDROID& operator=( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+ ExternalFormatANDROID & operator=( ExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ExternalFormatANDROID ) );
return *this;
}
- MemoryGetAndroidHardwareBufferInfoANDROID & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ ExternalFormatANDROID & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- MemoryGetAndroidHardwareBufferInfoANDROID & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+ ExternalFormatANDROID & setExternalFormat( uint64_t externalFormat_ ) VULKAN_HPP_NOEXCEPT
{
- memory = memory_;
+ externalFormat = externalFormat_;
return *this;
}
- operator VkMemoryGetAndroidHardwareBufferInfoANDROID const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkExternalFormatANDROID const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID*>( this );
+ return *reinterpret_cast<const VkExternalFormatANDROID*>( this );
}
- operator VkMemoryGetAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT
+ operator VkExternalFormatANDROID &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkMemoryGetAndroidHardwareBufferInfoANDROID*>( this );
+ return *reinterpret_cast<VkExternalFormatANDROID*>( this );
}
- bool operator==( MemoryGetAndroidHardwareBufferInfoANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ExternalFormatANDROID const& ) const = default;
+#else
+ bool operator==( ExternalFormatANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( memory == rhs.memory );
+ && ( externalFormat == rhs.externalFormat );
}
- bool operator!=( MemoryGetAndroidHardwareBufferInfoANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( ExternalFormatANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalFormatANDROID;
+ void* pNext = {};
+ uint64_t externalFormat = {};
+
+ };
+ static_assert( sizeof( ExternalFormatANDROID ) == sizeof( VkExternalFormatANDROID ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ExternalFormatANDROID>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eExternalFormatANDROID>
+ {
+ using Type = ExternalFormatANDROID;
};
- static_assert( sizeof( MemoryGetAndroidHardwareBufferInfoANDROID ) == sizeof( VkMemoryGetAndroidHardwareBufferInfoANDROID ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<MemoryGetAndroidHardwareBufferInfoANDROID>::value, "struct wrapper is not a standard layout!" );
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
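// Illustrative sketch only (not part of the generated header or this patch): one plausible
// way the new ExternalFormatANDROID wrapper could be chained when creating an image backed
// by an AHardwareBuffer whose format has no VkFormat equivalent. `ahbFormatProperties`
// (a vk::AndroidHardwareBufferFormatPropertiesANDROID queried beforehand) and
// `imageCreateInfo` (a vk::ImageCreateInfo) are assumed to exist.
vk::ExternalFormatANDROID externalFormat{};
externalFormat.setExternalFormat( ahbFormatProperties.externalFormat );

imageCreateInfo.setFormat( vk::Format::eUndefined )   // format is carried by externalFormat instead
               .setPNext( &externalFormat );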
- struct MemoryGetFdInfoKHR
+ struct ExternalImageFormatProperties
{
- VULKAN_HPP_CONSTEXPR MemoryGetFdInfoKHR( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
- : memory( memory_ )
- , handleType( handleType_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalImageFormatProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ExternalImageFormatProperties(VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {}) VULKAN_HPP_NOEXCEPT
+ : externalMemoryProperties( externalMemoryProperties_ )
{}
- VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR & operator=( VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ExternalImageFormatProperties( ExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ExternalImageFormatProperties( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ ExternalImageFormatProperties & operator=( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR ) - offsetof( MemoryGetFdInfoKHR, pNext ) );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties const *>( &rhs );
return *this;
}
- MemoryGetFdInfoKHR( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ ExternalImageFormatProperties & operator=( ExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ExternalImageFormatProperties ) );
+ return *this;
+ }
+
+
+ operator VkExternalImageFormatProperties const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkExternalImageFormatProperties*>( this );
+ }
+
+ operator VkExternalImageFormatProperties &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkExternalImageFormatProperties*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ExternalImageFormatProperties const& ) const = default;
+#else
+ bool operator==( ExternalImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( externalMemoryProperties == rhs.externalMemoryProperties );
+ }
+
+ bool operator!=( ExternalImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalImageFormatProperties;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties = {};
+
+ };
+ static_assert( sizeof( ExternalImageFormatProperties ) == sizeof( VkExternalImageFormatProperties ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ExternalImageFormatProperties>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eExternalImageFormatProperties>
+ {
+ using Type = ExternalImageFormatProperties;
+ };
+ using ExternalImageFormatPropertiesKHR = ExternalImageFormatProperties;
+
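// Illustrative sketch only (not part of the generated header or this patch): querying
// external-memory capabilities of an image format by chaining the new
// ExternalImageFormatProperties wrapper into the result of getImageFormatProperties2.
// `physicalDevice` is assumed to be a valid vk::PhysicalDevice.
vk::PhysicalDeviceExternalImageFormatInfo externalInfo{};
externalInfo.setHandleType( vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );

vk::PhysicalDeviceImageFormatInfo2 formatInfo{};
formatInfo.setFormat( vk::Format::eR8G8B8A8Unorm )
          .setType( vk::ImageType::e2D )
          .setTiling( vk::ImageTiling::eOptimal )
          .setUsage( vk::ImageUsageFlagBits::eSampled )
          .setPNext( &externalInfo );

auto chain = physicalDevice.getImageFormatProperties2<vk::ImageFormatProperties2,
                                                      vk::ExternalImageFormatProperties>( formatInfo );
vk::ExternalMemoryProperties memoryProperties =
    chain.get<vk::ExternalImageFormatProperties>().externalMemoryProperties;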
+ struct ExternalMemoryBufferCreateInfo
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryBufferCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ExternalMemoryBufferCreateInfo(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {}) VULKAN_HPP_NOEXCEPT
+ : handleTypes( handleTypes_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR ExternalMemoryBufferCreateInfo( ExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ExternalMemoryBufferCreateInfo( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- MemoryGetFdInfoKHR& operator=( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ ExternalMemoryBufferCreateInfo & operator=( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo const *>( &rhs );
return *this;
}
- MemoryGetFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ ExternalMemoryBufferCreateInfo & operator=( ExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ExternalMemoryBufferCreateInfo ) );
return *this;
}
- MemoryGetFdInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+ ExternalMemoryBufferCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- memory = memory_;
+ pNext = pNext_;
return *this;
}
- MemoryGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ ExternalMemoryBufferCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
{
- handleType = handleType_;
+ handleTypes = handleTypes_;
return *this;
}
- operator VkMemoryGetFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkExternalMemoryBufferCreateInfo const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkMemoryGetFdInfoKHR*>( this );
+ return *reinterpret_cast<const VkExternalMemoryBufferCreateInfo*>( this );
}
- operator VkMemoryGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkExternalMemoryBufferCreateInfo &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkMemoryGetFdInfoKHR*>( this );
+ return *reinterpret_cast<VkExternalMemoryBufferCreateInfo*>( this );
}
- bool operator==( MemoryGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ExternalMemoryBufferCreateInfo const& ) const = default;
+#else
+ bool operator==( ExternalMemoryBufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( memory == rhs.memory )
- && ( handleType == rhs.handleType );
+ && ( handleTypes == rhs.handleTypes );
}
- bool operator!=( MemoryGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( ExternalMemoryBufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetFdInfoKHR;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryBufferCreateInfo;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {};
+
};
- static_assert( sizeof( MemoryGetFdInfoKHR ) == sizeof( VkMemoryGetFdInfoKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<MemoryGetFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( ExternalMemoryBufferCreateInfo ) == sizeof( VkExternalMemoryBufferCreateInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ExternalMemoryBufferCreateInfo>::value, "struct wrapper is not a standard layout!" );
-#ifdef VK_USE_PLATFORM_WIN32_KHR
+ template <>
+ struct CppType<StructureType, StructureType::eExternalMemoryBufferCreateInfo>
+ {
+ using Type = ExternalMemoryBufferCreateInfo;
+ };
+ using ExternalMemoryBufferCreateInfoKHR = ExternalMemoryBufferCreateInfo;
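// Illustrative sketch only (not part of the generated header or this patch): creating a
// buffer whose memory can later be exported/imported by chaining the new
// ExternalMemoryBufferCreateInfo wrapper into BufferCreateInfo. `device` is assumed to be
// a valid vk::Device; the matching ExportMemoryAllocateInfo at allocation time is omitted.
vk::ExternalMemoryBufferCreateInfo externalBufferInfo{};
externalBufferInfo.setHandleTypes( vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );

vk::BufferCreateInfo bufferInfo{};
bufferInfo.setSize( 4096 )
          .setUsage( vk::BufferUsageFlagBits::eStorageBuffer )
          .setSharingMode( vk::SharingMode::eExclusive )
          .setPNext( &externalBufferInfo );

vk::UniqueBuffer buffer = device.createBufferUnique( bufferInfo );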
- struct MemoryGetWin32HandleInfoKHR
+ struct ExternalMemoryImageCreateInfo
{
- VULKAN_HPP_CONSTEXPR MemoryGetWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
- : memory( memory_ )
- , handleType( handleType_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryImageCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfo(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {}) VULKAN_HPP_NOEXCEPT
+ : handleTypes( handleTypes_ )
{}
- VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR & operator=( VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR ) - offsetof( MemoryGetWin32HandleInfoKHR, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfo( ExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- MemoryGetWin32HandleInfoKHR( VkMemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ ExternalMemoryImageCreateInfo( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- MemoryGetWin32HandleInfoKHR& operator=( VkMemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ ExternalMemoryImageCreateInfo & operator=( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo const *>( &rhs );
return *this;
}
- MemoryGetWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ ExternalMemoryImageCreateInfo & operator=( ExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ExternalMemoryImageCreateInfo ) );
return *this;
}
- MemoryGetWin32HandleInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+ ExternalMemoryImageCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- memory = memory_;
+ pNext = pNext_;
return *this;
}
- MemoryGetWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ ExternalMemoryImageCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
{
- handleType = handleType_;
+ handleTypes = handleTypes_;
return *this;
}
- operator VkMemoryGetWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkExternalMemoryImageCreateInfo const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR*>( this );
+ return *reinterpret_cast<const VkExternalMemoryImageCreateInfo*>( this );
}
- operator VkMemoryGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkExternalMemoryImageCreateInfo &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkMemoryGetWin32HandleInfoKHR*>( this );
+ return *reinterpret_cast<VkExternalMemoryImageCreateInfo*>( this );
}
- bool operator==( MemoryGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ExternalMemoryImageCreateInfo const& ) const = default;
+#else
+ bool operator==( ExternalMemoryImageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( memory == rhs.memory )
- && ( handleType == rhs.handleType );
+ && ( handleTypes == rhs.handleTypes );
}
- bool operator!=( MemoryGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( ExternalMemoryImageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetWin32HandleInfoKHR;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryImageCreateInfo;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {};
+
};
- static_assert( sizeof( MemoryGetWin32HandleInfoKHR ) == sizeof( VkMemoryGetWin32HandleInfoKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<MemoryGetWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ static_assert( sizeof( ExternalMemoryImageCreateInfo ) == sizeof( VkExternalMemoryImageCreateInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ExternalMemoryImageCreateInfo>::value, "struct wrapper is not a standard layout!" );
- struct MemoryHeap
+ template <>
+ struct CppType<StructureType, StructureType::eExternalMemoryImageCreateInfo>
+ {
+ using Type = ExternalMemoryImageCreateInfo;
+ };
+ using ExternalMemoryImageCreateInfoKHR = ExternalMemoryImageCreateInfo;
+
+ struct ExternalMemoryImageCreateInfoNV
{
- MemoryHeap( VULKAN_HPP_NAMESPACE::DeviceSize size_ = {},
- VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
- : size( size_ )
- , flags( flags_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryImageCreateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfoNV(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {}) VULKAN_HPP_NOEXCEPT
+ : handleTypes( handleTypes_ )
{}
- MemoryHeap( VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfoNV( ExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ExternalMemoryImageCreateInfoNV( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- MemoryHeap& operator=( VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT
+ ExternalMemoryImageCreateInfoNV & operator=( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryHeap const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV const *>( &rhs );
return *this;
}
- operator VkMemoryHeap const&() const VULKAN_HPP_NOEXCEPT
+ ExternalMemoryImageCreateInfoNV & operator=( ExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkMemoryHeap*>( this );
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ExternalMemoryImageCreateInfoNV ) );
+ return *this;
}
- operator VkMemoryHeap &() VULKAN_HPP_NOEXCEPT
+ ExternalMemoryImageCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkMemoryHeap*>( this );
+ pNext = pNext_;
+ return *this;
}
- bool operator==( MemoryHeap const& rhs ) const VULKAN_HPP_NOEXCEPT
+ ExternalMemoryImageCreateInfoNV & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT
{
- return ( size == rhs.size )
- && ( flags == rhs.flags );
+ handleTypes = handleTypes_;
+ return *this;
}
- bool operator!=( MemoryHeap const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+ operator VkExternalMemoryImageCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkExternalMemoryImageCreateInfoNV*>( this );
+ }
+
+ operator VkExternalMemoryImageCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkExternalMemoryImageCreateInfoNV*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ExternalMemoryImageCreateInfoNV const& ) const = default;
+#else
+ bool operator==( ExternalMemoryImageCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( handleTypes == rhs.handleTypes );
+ }
+
+ bool operator!=( ExternalMemoryImageCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- VULKAN_HPP_NAMESPACE::DeviceSize size = {};
- VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags = {};
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryImageCreateInfoNV;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes = {};
+
};
- static_assert( sizeof( MemoryHeap ) == sizeof( VkMemoryHeap ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<MemoryHeap>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( ExternalMemoryImageCreateInfoNV ) == sizeof( VkExternalMemoryImageCreateInfoNV ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ExternalMemoryImageCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
- struct MemoryHostPointerPropertiesEXT
+ template <>
+ struct CppType<StructureType, StructureType::eExternalMemoryImageCreateInfoNV>
{
- MemoryHostPointerPropertiesEXT( uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT
- : memoryTypeBits( memoryTypeBits_ )
+ using Type = ExternalMemoryImageCreateInfoNV;
+ };
+
+ struct FilterCubicImageViewImageFormatPropertiesEXT
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFilterCubicImageViewImageFormatPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR FilterCubicImageViewImageFormatPropertiesEXT(VULKAN_HPP_NAMESPACE::Bool32 filterCubic_ = {}, VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax_ = {}) VULKAN_HPP_NOEXCEPT
+ : filterCubic( filterCubic_ ), filterCubicMinmax( filterCubicMinmax_ )
{}
- VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR FilterCubicImageViewImageFormatPropertiesEXT( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ FilterCubicImageViewImageFormatPropertiesEXT( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT ) - offsetof( MemoryHostPointerPropertiesEXT, pNext ) );
- return *this;
+ *this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- MemoryHostPointerPropertiesEXT( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ FilterCubicImageViewImageFormatPropertiesEXT & operator=( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT const *>( &rhs );
+ return *this;
}
- MemoryHostPointerPropertiesEXT& operator=( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ FilterCubicImageViewImageFormatPropertiesEXT & operator=( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( FilterCubicImageViewImageFormatPropertiesEXT ) );
return *this;
}
- operator VkMemoryHostPointerPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkFilterCubicImageViewImageFormatPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkMemoryHostPointerPropertiesEXT*>( this );
+ return *reinterpret_cast<const VkFilterCubicImageViewImageFormatPropertiesEXT*>( this );
}
- operator VkMemoryHostPointerPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkFilterCubicImageViewImageFormatPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkMemoryHostPointerPropertiesEXT*>( this );
+ return *reinterpret_cast<VkFilterCubicImageViewImageFormatPropertiesEXT*>( this );
}
- bool operator==( MemoryHostPointerPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( FilterCubicImageViewImageFormatPropertiesEXT const& ) const = default;
+#else
+ bool operator==( FilterCubicImageViewImageFormatPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( memoryTypeBits == rhs.memoryTypeBits );
+ && ( filterCubic == rhs.filterCubic )
+ && ( filterCubicMinmax == rhs.filterCubicMinmax );
}
- bool operator!=( MemoryHostPointerPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( FilterCubicImageViewImageFormatPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryHostPointerPropertiesEXT;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFilterCubicImageViewImageFormatPropertiesEXT;
void* pNext = {};
- uint32_t memoryTypeBits = {};
+ VULKAN_HPP_NAMESPACE::Bool32 filterCubic = {};
+ VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax = {};
+
};
- static_assert( sizeof( MemoryHostPointerPropertiesEXT ) == sizeof( VkMemoryHostPointerPropertiesEXT ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<MemoryHostPointerPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( FilterCubicImageViewImageFormatPropertiesEXT ) == sizeof( VkFilterCubicImageViewImageFormatPropertiesEXT ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<FilterCubicImageViewImageFormatPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
- struct MemoryOpaqueCaptureAddressAllocateInfo
+ template <>
+ struct CppType<StructureType, StructureType::eFilterCubicImageViewImageFormatPropertiesEXT>
{
- VULKAN_HPP_CONSTEXPR MemoryOpaqueCaptureAddressAllocateInfo( uint64_t opaqueCaptureAddress_ = {} ) VULKAN_HPP_NOEXCEPT
- : opaqueCaptureAddress( opaqueCaptureAddress_ )
+ using Type = FilterCubicImageViewImageFormatPropertiesEXT;
+ };
+
+ struct FramebufferAttachmentImageInfo
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferAttachmentImageInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR FramebufferAttachmentImageInfo(VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, uint32_t width_ = {}, uint32_t height_ = {}, uint32_t layerCount_ = {}, uint32_t viewFormatCount_ = {}, const VULKAN_HPP_NAMESPACE::Format* pViewFormats_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), usage( usage_ ), width( width_ ), height( height_ ), layerCount( layerCount_ ), viewFormatCount( viewFormatCount_ ), pViewFormats( pViewFormats_ )
{}
- VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo & operator=( VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR FramebufferAttachmentImageInfo( FramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ FramebufferAttachmentImageInfo( VkFramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo ) - offsetof( MemoryOpaqueCaptureAddressAllocateInfo, pNext ) );
- return *this;
+ *this = rhs;
}
- MemoryOpaqueCaptureAddressAllocateInfo( VkMemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ FramebufferAttachmentImageInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_, uint32_t width_, uint32_t height_, uint32_t layerCount_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & viewFormats_ )
+ : flags( flags_ ), usage( usage_ ), width( width_ ), height( height_ ), layerCount( layerCount_ ), viewFormatCount( static_cast<uint32_t>( viewFormats_.size() ) ), pViewFormats( viewFormats_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ FramebufferAttachmentImageInfo & operator=( VkFramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo const *>( &rhs );
+ return *this;
}
- MemoryOpaqueCaptureAddressAllocateInfo& operator=( VkMemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ FramebufferAttachmentImageInfo & operator=( FramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( FramebufferAttachmentImageInfo ) );
return *this;
}
- MemoryOpaqueCaptureAddressAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ FramebufferAttachmentImageInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- MemoryOpaqueCaptureAddressAllocateInfo & setOpaqueCaptureAddress( uint64_t opaqueCaptureAddress_ ) VULKAN_HPP_NOEXCEPT
+ FramebufferAttachmentImageInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
- opaqueCaptureAddress = opaqueCaptureAddress_;
+ flags = flags_;
return *this;
}
- operator VkMemoryOpaqueCaptureAddressAllocateInfo const&() const VULKAN_HPP_NOEXCEPT
+ FramebufferAttachmentImageInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkMemoryOpaqueCaptureAddressAllocateInfo*>( this );
+ usage = usage_;
+ return *this;
}
- operator VkMemoryOpaqueCaptureAddressAllocateInfo &() VULKAN_HPP_NOEXCEPT
+ FramebufferAttachmentImageInfo & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkMemoryOpaqueCaptureAddressAllocateInfo*>( this );
+ width = width_;
+ return *this;
}
- bool operator==( MemoryOpaqueCaptureAddressAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ FramebufferAttachmentImageInfo & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
+ {
+ height = height_;
+ return *this;
+ }
+
+ FramebufferAttachmentImageInfo & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ layerCount = layerCount_;
+ return *this;
+ }
+
+ FramebufferAttachmentImageInfo & setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ viewFormatCount = viewFormatCount_;
+ return *this;
+ }
+
+ FramebufferAttachmentImageInfo & setPViewFormats( const VULKAN_HPP_NAMESPACE::Format* pViewFormats_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pViewFormats = pViewFormats_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ FramebufferAttachmentImageInfo & setViewFormats( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & viewFormats_ ) VULKAN_HPP_NOEXCEPT
+ {
+ viewFormatCount = static_cast<uint32_t>( viewFormats_.size() );
+ pViewFormats = viewFormats_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+ operator VkFramebufferAttachmentImageInfo const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkFramebufferAttachmentImageInfo*>( this );
+ }
+
+ operator VkFramebufferAttachmentImageInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkFramebufferAttachmentImageInfo*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( FramebufferAttachmentImageInfo const& ) const = default;
+#else
+ bool operator==( FramebufferAttachmentImageInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( opaqueCaptureAddress == rhs.opaqueCaptureAddress );
+ && ( flags == rhs.flags )
+ && ( usage == rhs.usage )
+ && ( width == rhs.width )
+ && ( height == rhs.height )
+ && ( layerCount == rhs.layerCount )
+ && ( viewFormatCount == rhs.viewFormatCount )
+ && ( pViewFormats == rhs.pViewFormats );
}
- bool operator!=( MemoryOpaqueCaptureAddressAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( FramebufferAttachmentImageInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryOpaqueCaptureAddressAllocateInfo;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferAttachmentImageInfo;
const void* pNext = {};
- uint64_t opaqueCaptureAddress = {};
+ VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {};
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
+ uint32_t width = {};
+ uint32_t height = {};
+ uint32_t layerCount = {};
+ uint32_t viewFormatCount = {};
+ const VULKAN_HPP_NAMESPACE::Format* pViewFormats = {};
+
};
- static_assert( sizeof( MemoryOpaqueCaptureAddressAllocateInfo ) == sizeof( VkMemoryOpaqueCaptureAddressAllocateInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<MemoryOpaqueCaptureAddressAllocateInfo>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( FramebufferAttachmentImageInfo ) == sizeof( VkFramebufferAttachmentImageInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<FramebufferAttachmentImageInfo>::value, "struct wrapper is not a standard layout!" );
- struct MemoryPriorityAllocateInfoEXT
+ template <>
+ struct CppType<StructureType, StructureType::eFramebufferAttachmentImageInfo>
{
- VULKAN_HPP_CONSTEXPR MemoryPriorityAllocateInfoEXT( float priority_ = {} ) VULKAN_HPP_NOEXCEPT
- : priority( priority_ )
+ using Type = FramebufferAttachmentImageInfo;
+ };
+ using FramebufferAttachmentImageInfoKHR = FramebufferAttachmentImageInfo;
+
+ struct FramebufferAttachmentsCreateInfo
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferAttachmentsCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR FramebufferAttachmentsCreateInfo(uint32_t attachmentImageInfoCount_ = {}, const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo* pAttachmentImageInfos_ = {}) VULKAN_HPP_NOEXCEPT
+ : attachmentImageInfoCount( attachmentImageInfoCount_ ), pAttachmentImageInfos( pAttachmentImageInfos_ )
{}
- VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR FramebufferAttachmentsCreateInfo( FramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ FramebufferAttachmentsCreateInfo( VkFramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT ) - offsetof( MemoryPriorityAllocateInfoEXT, pNext ) );
- return *this;
+ *this = rhs;
}
- MemoryPriorityAllocateInfoEXT( VkMemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ FramebufferAttachmentsCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo> const & attachmentImageInfos_ )
+ : attachmentImageInfoCount( static_cast<uint32_t>( attachmentImageInfos_.size() ) ), pAttachmentImageInfos( attachmentImageInfos_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ FramebufferAttachmentsCreateInfo & operator=( VkFramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo const *>( &rhs );
+ return *this;
}
- MemoryPriorityAllocateInfoEXT& operator=( VkMemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ FramebufferAttachmentsCreateInfo & operator=( FramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( FramebufferAttachmentsCreateInfo ) );
return *this;
}
- MemoryPriorityAllocateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ FramebufferAttachmentsCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- MemoryPriorityAllocateInfoEXT & setPriority( float priority_ ) VULKAN_HPP_NOEXCEPT
+ FramebufferAttachmentsCreateInfo & setAttachmentImageInfoCount( uint32_t attachmentImageInfoCount_ ) VULKAN_HPP_NOEXCEPT
{
- priority = priority_;
+ attachmentImageInfoCount = attachmentImageInfoCount_;
return *this;
}
- operator VkMemoryPriorityAllocateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+ FramebufferAttachmentsCreateInfo & setPAttachmentImageInfos( const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo* pAttachmentImageInfos_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkMemoryPriorityAllocateInfoEXT*>( this );
+ pAttachmentImageInfos = pAttachmentImageInfos_;
+ return *this;
}
- operator VkMemoryPriorityAllocateInfoEXT &() VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ FramebufferAttachmentsCreateInfo & setAttachmentImageInfos( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo> const & attachmentImageInfos_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkMemoryPriorityAllocateInfoEXT*>( this );
+ attachmentImageInfoCount = static_cast<uint32_t>( attachmentImageInfos_.size() );
+ pAttachmentImageInfos = attachmentImageInfos_.data();
+ return *this;
}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
- bool operator==( MemoryPriorityAllocateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+ operator VkFramebufferAttachmentsCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkFramebufferAttachmentsCreateInfo*>( this );
+ }
+
+ operator VkFramebufferAttachmentsCreateInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkFramebufferAttachmentsCreateInfo*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( FramebufferAttachmentsCreateInfo const& ) const = default;
+#else
+ bool operator==( FramebufferAttachmentsCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( priority == rhs.priority );
+ && ( attachmentImageInfoCount == rhs.attachmentImageInfoCount )
+ && ( pAttachmentImageInfos == rhs.pAttachmentImageInfos );
}
- bool operator!=( MemoryPriorityAllocateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( FramebufferAttachmentsCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryPriorityAllocateInfoEXT;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferAttachmentsCreateInfo;
const void* pNext = {};
- float priority = {};
+ uint32_t attachmentImageInfoCount = {};
+ const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo* pAttachmentImageInfos = {};
+
};
- static_assert( sizeof( MemoryPriorityAllocateInfoEXT ) == sizeof( VkMemoryPriorityAllocateInfoEXT ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<MemoryPriorityAllocateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( FramebufferAttachmentsCreateInfo ) == sizeof( VkFramebufferAttachmentsCreateInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<FramebufferAttachmentsCreateInfo>::value, "struct wrapper is not a standard layout!" );
- struct MemoryRequirements
+ template <>
+ struct CppType<StructureType, StructureType::eFramebufferAttachmentsCreateInfo>
+ {
+ using Type = FramebufferAttachmentsCreateInfo;
+ };
+ using FramebufferAttachmentsCreateInfoKHR = FramebufferAttachmentsCreateInfo;
+
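// Illustrative sketch only (not part of the generated header or this patch): one plausible
// use of the new FramebufferAttachmentImageInfo / FramebufferAttachmentsCreateInfo wrappers
// to create an imageless framebuffer (VK_KHR_imageless_framebuffer / Vulkan 1.2).
// `device`, `renderPass`, `width`, `height` and `colorFormat` are assumed to exist.
vk::FramebufferAttachmentImageInfo attachmentImageInfo{};
attachmentImageInfo.setUsage( vk::ImageUsageFlagBits::eColorAttachment )
                   .setWidth( width )
                   .setHeight( height )
                   .setLayerCount( 1 )
                   .setViewFormatCount( 1 )
                   .setPViewFormats( &colorFormat );

vk::FramebufferAttachmentsCreateInfo attachmentsInfo{};
attachmentsInfo.setAttachmentImageInfoCount( 1 )
               .setPAttachmentImageInfos( &attachmentImageInfo );

vk::FramebufferCreateInfo framebufferInfo{};
framebufferInfo.setFlags( vk::FramebufferCreateFlagBits::eImageless )
               .setRenderPass( renderPass )
               .setAttachmentCount( 1 )
               .setWidth( width )
               .setHeight( height )
               .setLayers( 1 )
               .setPNext( &attachmentsInfo );

vk::UniqueFramebuffer framebuffer = device.createFramebufferUnique( framebufferInfo );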
+ struct GraphicsShaderGroupCreateInfoNV
{
- MemoryRequirements( VULKAN_HPP_NAMESPACE::DeviceSize size_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize alignment_ = {},
- uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT
- : size( size_ )
- , alignment( alignment_ )
- , memoryTypeBits( memoryTypeBits_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsShaderGroupCreateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR GraphicsShaderGroupCreateInfoNV(uint32_t stageCount_ = {}, const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ = {}, const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState_ = {}) VULKAN_HPP_NOEXCEPT
+ : stageCount( stageCount_ ), pStages( pStages_ ), pVertexInputState( pVertexInputState_ ), pTessellationState( pTessellationState_ )
{}
- MemoryRequirements( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR GraphicsShaderGroupCreateInfoNV( GraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ GraphicsShaderGroupCreateInfoNV( VkGraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- MemoryRequirements& operator=( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ GraphicsShaderGroupCreateInfoNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_, const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState_ = {} )
+ : stageCount( static_cast<uint32_t>( stages_.size() ) ), pStages( stages_.data() ), pVertexInputState( pVertexInputState_ ), pTessellationState( pTessellationState_ )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ GraphicsShaderGroupCreateInfoNV & operator=( VkGraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryRequirements const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV const *>( &rhs );
return *this;
}
- operator VkMemoryRequirements const&() const VULKAN_HPP_NOEXCEPT
+ GraphicsShaderGroupCreateInfoNV & operator=( GraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkMemoryRequirements*>( this );
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( GraphicsShaderGroupCreateInfoNV ) );
+ return *this;
}
- operator VkMemoryRequirements &() VULKAN_HPP_NOEXCEPT
+ GraphicsShaderGroupCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkMemoryRequirements*>( this );
+ pNext = pNext_;
+ return *this;
}
- bool operator==( MemoryRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT
+ GraphicsShaderGroupCreateInfoNV & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT
{
- return ( size == rhs.size )
- && ( alignment == rhs.alignment )
- && ( memoryTypeBits == rhs.memoryTypeBits );
+ stageCount = stageCount_;
+ return *this;
}
- bool operator!=( MemoryRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT
+ GraphicsShaderGroupCreateInfoNV & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ ) VULKAN_HPP_NOEXCEPT
{
- return !operator==( rhs );
+ pStages = pStages_;
+ return *this;
}
- public:
- VULKAN_HPP_NAMESPACE::DeviceSize size = {};
- VULKAN_HPP_NAMESPACE::DeviceSize alignment = {};
- uint32_t memoryTypeBits = {};
- };
- static_assert( sizeof( MemoryRequirements ) == sizeof( VkMemoryRequirements ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<MemoryRequirements>::value, "struct wrapper is not a standard layout!" );
-
- struct MemoryRequirements2
- {
- MemoryRequirements2( VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements_ = {} ) VULKAN_HPP_NOEXCEPT
- : memoryRequirements( memoryRequirements_ )
- {}
-
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 & operator=( VULKAN_HPP_NAMESPACE::MemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ GraphicsShaderGroupCreateInfoNV & setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_ ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryRequirements2 ) - offsetof( MemoryRequirements2, pNext ) );
+ stageCount = static_cast<uint32_t>( stages_.size() );
+ pStages = stages_.data();
return *this;
}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
- MemoryRequirements2( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ GraphicsShaderGroupCreateInfoNV & setPVertexInputState( const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState_ ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ pVertexInputState = pVertexInputState_;
+ return *this;
}
- MemoryRequirements2& operator=( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ GraphicsShaderGroupCreateInfoNV & setPTessellationState( const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState_ ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryRequirements2 const *>(&rhs);
+ pTessellationState = pTessellationState_;
return *this;
}
- operator VkMemoryRequirements2 const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkGraphicsShaderGroupCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkMemoryRequirements2*>( this );
+ return *reinterpret_cast<const VkGraphicsShaderGroupCreateInfoNV*>( this );
}
- operator VkMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT
+ operator VkGraphicsShaderGroupCreateInfoNV &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkMemoryRequirements2*>( this );
+ return *reinterpret_cast<VkGraphicsShaderGroupCreateInfoNV*>( this );
}
- bool operator==( MemoryRequirements2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( GraphicsShaderGroupCreateInfoNV const& ) const = default;
+#else
+ bool operator==( GraphicsShaderGroupCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( memoryRequirements == rhs.memoryRequirements );
+ && ( stageCount == rhs.stageCount )
+ && ( pStages == rhs.pStages )
+ && ( pVertexInputState == rhs.pVertexInputState )
+ && ( pTessellationState == rhs.pTessellationState );
}
- bool operator!=( MemoryRequirements2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( GraphicsShaderGroupCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryRequirements2;
- void* pNext = {};
- VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements = {};
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsShaderGroupCreateInfoNV;
+ const void* pNext = {};
+ uint32_t stageCount = {};
+ const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages = {};
+ const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState = {};
+ const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState = {};
+
};
- static_assert( sizeof( MemoryRequirements2 ) == sizeof( VkMemoryRequirements2 ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<MemoryRequirements2>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( GraphicsShaderGroupCreateInfoNV ) == sizeof( VkGraphicsShaderGroupCreateInfoNV ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<GraphicsShaderGroupCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
- struct MemoryType
+ template <>
+ struct CppType<StructureType, StructureType::eGraphicsShaderGroupCreateInfoNV>
{
- MemoryType( VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags_ = {},
- uint32_t heapIndex_ = {} ) VULKAN_HPP_NOEXCEPT
- : propertyFlags( propertyFlags_ )
- , heapIndex( heapIndex_ )
+ using Type = GraphicsShaderGroupCreateInfoNV;
+ };
+
+ struct GraphicsPipelineShaderGroupsCreateInfoNV
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR GraphicsPipelineShaderGroupsCreateInfoNV(uint32_t groupCount_ = {}, const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV* pGroups_ = {}, uint32_t pipelineCount_ = {}, const VULKAN_HPP_NAMESPACE::Pipeline* pPipelines_ = {}) VULKAN_HPP_NOEXCEPT
+ : groupCount( groupCount_ ), pGroups( pGroups_ ), pipelineCount( pipelineCount_ ), pPipelines( pPipelines_ )
{}
- MemoryType( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR GraphicsPipelineShaderGroupsCreateInfoNV( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ GraphicsPipelineShaderGroupsCreateInfoNV( VkGraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- MemoryType& operator=( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ GraphicsPipelineShaderGroupsCreateInfoNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV> const & groups_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Pipeline> const & pipelines_ = {} )
+ : groupCount( static_cast<uint32_t>( groups_.size() ) ), pGroups( groups_.data() ), pipelineCount( static_cast<uint32_t>( pipelines_.size() ) ), pPipelines( pipelines_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ GraphicsPipelineShaderGroupsCreateInfoNV & operator=( VkGraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GraphicsPipelineShaderGroupsCreateInfoNV const *>( &rhs );
+ return *this;
+ }
+
+ GraphicsPipelineShaderGroupsCreateInfoNV & operator=( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryType const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( GraphicsPipelineShaderGroupsCreateInfoNV ) );
return *this;
}
- operator VkMemoryType const&() const VULKAN_HPP_NOEXCEPT
+ GraphicsPipelineShaderGroupsCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkMemoryType*>( this );
+ pNext = pNext_;
+ return *this;
}
- operator VkMemoryType &() VULKAN_HPP_NOEXCEPT
+ GraphicsPipelineShaderGroupsCreateInfoNV & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkMemoryType*>( this );
+ groupCount = groupCount_;
+ return *this;
}
- bool operator==( MemoryType const& rhs ) const VULKAN_HPP_NOEXCEPT
+ GraphicsPipelineShaderGroupsCreateInfoNV & setPGroups( const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV* pGroups_ ) VULKAN_HPP_NOEXCEPT
{
- return ( propertyFlags == rhs.propertyFlags )
- && ( heapIndex == rhs.heapIndex );
+ pGroups = pGroups_;
+ return *this;
}
- bool operator!=( MemoryType const& rhs ) const VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ GraphicsPipelineShaderGroupsCreateInfoNV & setGroups( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV> const & groups_ ) VULKAN_HPP_NOEXCEPT
+ {
+ groupCount = static_cast<uint32_t>( groups_.size() );
+ pGroups = groups_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+ GraphicsPipelineShaderGroupsCreateInfoNV & setPipelineCount( uint32_t pipelineCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pipelineCount = pipelineCount_;
+ return *this;
+ }
+
+ GraphicsPipelineShaderGroupsCreateInfoNV & setPPipelines( const VULKAN_HPP_NAMESPACE::Pipeline* pPipelines_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pPipelines = pPipelines_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ GraphicsPipelineShaderGroupsCreateInfoNV & setPipelines( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Pipeline> const & pipelines_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pipelineCount = static_cast<uint32_t>( pipelines_.size() );
+ pPipelines = pipelines_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+ operator VkGraphicsPipelineShaderGroupsCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkGraphicsPipelineShaderGroupsCreateInfoNV*>( this );
+ }
+
+ operator VkGraphicsPipelineShaderGroupsCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkGraphicsPipelineShaderGroupsCreateInfoNV*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( GraphicsPipelineShaderGroupsCreateInfoNV const& ) const = default;
+#else
+ bool operator==( GraphicsPipelineShaderGroupsCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( groupCount == rhs.groupCount )
+ && ( pGroups == rhs.pGroups )
+ && ( pipelineCount == rhs.pipelineCount )
+ && ( pPipelines == rhs.pPipelines );
+ }
+
+ bool operator!=( GraphicsPipelineShaderGroupsCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags = {};
- uint32_t heapIndex = {};
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV;
+ const void* pNext = {};
+ uint32_t groupCount = {};
+ const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV* pGroups = {};
+ uint32_t pipelineCount = {};
+ const VULKAN_HPP_NAMESPACE::Pipeline* pPipelines = {};
+
};
- static_assert( sizeof( MemoryType ) == sizeof( VkMemoryType ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<MemoryType>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( GraphicsPipelineShaderGroupsCreateInfoNV ) == sizeof( VkGraphicsPipelineShaderGroupsCreateInfoNV ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<GraphicsPipelineShaderGroupsCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
-#ifdef VK_USE_PLATFORM_WIN32_KHR
+ template <>
+ struct CppType<StructureType, StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV>
+ {
+ using Type = GraphicsPipelineShaderGroupsCreateInfoNV;
+ };
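// Illustrative sketch only (not part of the generated header or this patch): chaining the
// new GraphicsShaderGroupCreateInfoNV / GraphicsPipelineShaderGroupsCreateInfoNV wrappers
// into a graphics pipeline for VK_NV_device_generated_commands. `shaderStages`
// (std::vector<vk::PipelineShaderStageCreateInfo>) and `pipelineCreateInfo`
// (vk::GraphicsPipelineCreateInfo) are assumed to be set up elsewhere.
vk::GraphicsShaderGroupCreateInfoNV shaderGroup{};
shaderGroup.setStageCount( static_cast<uint32_t>( shaderStages.size() ) )
           .setPStages( shaderStages.data() );

vk::GraphicsPipelineShaderGroupsCreateInfoNV shaderGroupsInfo{};
shaderGroupsInfo.setGroupCount( 1 )
                .setPGroups( &shaderGroup );

pipelineCreateInfo.setPNext( &shaderGroupsInfo );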
- struct MemoryWin32HandlePropertiesKHR
+ struct HeadlessSurfaceCreateInfoEXT
{
- MemoryWin32HandlePropertiesKHR( uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT
- : memoryTypeBits( memoryTypeBits_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHeadlessSurfaceCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR HeadlessSurfaceCreateInfoEXT(VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
{}
- VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR & operator=( VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR HeadlessSurfaceCreateInfoEXT( HeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ HeadlessSurfaceCreateInfoEXT( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ HeadlessSurfaceCreateInfoEXT & operator=( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR ) - offsetof( MemoryWin32HandlePropertiesKHR, pNext ) );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const *>( &rhs );
return *this;
}
- MemoryWin32HandlePropertiesKHR( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ HeadlessSurfaceCreateInfoEXT & operator=( HeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( HeadlessSurfaceCreateInfoEXT ) );
+ return *this;
}
- MemoryWin32HandlePropertiesKHR& operator=( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ HeadlessSurfaceCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR const *>(&rhs);
+ pNext = pNext_;
return *this;
}
- operator VkMemoryWin32HandlePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
+ HeadlessSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkMemoryWin32HandlePropertiesKHR*>( this );
+ flags = flags_;
+ return *this;
}
- operator VkMemoryWin32HandlePropertiesKHR &() VULKAN_HPP_NOEXCEPT
+
+ operator VkHeadlessSurfaceCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkMemoryWin32HandlePropertiesKHR*>( this );
+ return *reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT*>( this );
}
- bool operator==( MemoryWin32HandlePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ operator VkHeadlessSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkHeadlessSurfaceCreateInfoEXT*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( HeadlessSurfaceCreateInfoEXT const& ) const = default;
+#else
+ bool operator==( HeadlessSurfaceCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( memoryTypeBits == rhs.memoryTypeBits );
+ && ( flags == rhs.flags );
}
- bool operator!=( MemoryWin32HandlePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( HeadlessSurfaceCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryWin32HandlePropertiesKHR;
- void* pNext = {};
- uint32_t memoryTypeBits = {};
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHeadlessSurfaceCreateInfoEXT;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags = {};
+
};
- static_assert( sizeof( MemoryWin32HandlePropertiesKHR ) == sizeof( VkMemoryWin32HandlePropertiesKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<MemoryWin32HandlePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ static_assert( sizeof( HeadlessSurfaceCreateInfoEXT ) == sizeof( VkHeadlessSurfaceCreateInfoEXT ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<HeadlessSurfaceCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
-#ifdef VK_USE_PLATFORM_METAL_EXT
+ template <>
+ struct CppType<StructureType, StructureType::eHeadlessSurfaceCreateInfoEXT>
+ {
+ using Type = HeadlessSurfaceCreateInfoEXT;
+ };
- struct MetalSurfaceCreateInfoEXT
+#ifdef VK_USE_PLATFORM_IOS_MVK
+ struct IOSSurfaceCreateInfoMVK
{
- VULKAN_HPP_CONSTEXPR MetalSurfaceCreateInfoEXT( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_ = {},
- const CAMetalLayer* pLayer_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , pLayer( pLayer_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIosSurfaceCreateInfoMVK;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR IOSSurfaceCreateInfoMVK(VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags_ = {}, const void* pView_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), pView( pView_ )
{}
- VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR IOSSurfaceCreateInfoMVK( IOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ IOSSurfaceCreateInfoMVK( VkIOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT ) - offsetof( MetalSurfaceCreateInfoEXT, pNext ) );
- return *this;
+ *this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- MetalSurfaceCreateInfoEXT( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ IOSSurfaceCreateInfoMVK & operator=( VkIOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const *>( &rhs );
+ return *this;
}
- MetalSurfaceCreateInfoEXT& operator=( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ IOSSurfaceCreateInfoMVK & operator=( IOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( IOSSurfaceCreateInfoMVK ) );
return *this;
}
- MetalSurfaceCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ IOSSurfaceCreateInfoMVK & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- MetalSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+ IOSSurfaceCreateInfoMVK & setFlags( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- MetalSurfaceCreateInfoEXT & setPLayer( const CAMetalLayer* pLayer_ ) VULKAN_HPP_NOEXCEPT
+ IOSSurfaceCreateInfoMVK & setPView( const void* pView_ ) VULKAN_HPP_NOEXCEPT
{
- pLayer = pLayer_;
+ pView = pView_;
return *this;
}
- operator VkMetalSurfaceCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkIOSSurfaceCreateInfoMVK const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkMetalSurfaceCreateInfoEXT*>( this );
+ return *reinterpret_cast<const VkIOSSurfaceCreateInfoMVK*>( this );
}
- operator VkMetalSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkIOSSurfaceCreateInfoMVK &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkMetalSurfaceCreateInfoEXT*>( this );
+ return *reinterpret_cast<VkIOSSurfaceCreateInfoMVK*>( this );
}
- bool operator==( MetalSurfaceCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( IOSSurfaceCreateInfoMVK const& ) const = default;
+#else
+ bool operator==( IOSSurfaceCreateInfoMVK const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
- && ( pLayer == rhs.pLayer );
+ && ( pView == rhs.pView );
}
- bool operator!=( MetalSurfaceCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( IOSSurfaceCreateInfoMVK const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMetalSurfaceCreateInfoEXT;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIosSurfaceCreateInfoMVK;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags = {};
- const CAMetalLayer* pLayer = {};
+ VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags = {};
+ const void* pView = {};
+
};
- static_assert( sizeof( MetalSurfaceCreateInfoEXT ) == sizeof( VkMetalSurfaceCreateInfoEXT ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<MetalSurfaceCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
-#endif /*VK_USE_PLATFORM_METAL_EXT*/
+ static_assert( sizeof( IOSSurfaceCreateInfoMVK ) == sizeof( VkIOSSurfaceCreateInfoMVK ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<IOSSurfaceCreateInfoMVK>::value, "struct wrapper is not a standard layout!" );
- struct MultisamplePropertiesEXT
+ template <>
+ struct CppType<StructureType, StructureType::eIosSurfaceCreateInfoMVK>
{
- MultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {} ) VULKAN_HPP_NOEXCEPT
- : maxSampleLocationGridSize( maxSampleLocationGridSize_ )
+ using Type = IOSSurfaceCreateInfoMVK;
+ };
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+ struct ImageDrmFormatModifierExplicitCreateInfoEXT
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT(uint64_t drmFormatModifier_ = {}, uint32_t drmFormatModifierPlaneCount_ = {}, const VULKAN_HPP_NAMESPACE::SubresourceLayout* pPlaneLayouts_ = {}) VULKAN_HPP_NOEXCEPT
+ : drmFormatModifier( drmFormatModifier_ ), drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ ), pPlaneLayouts( pPlaneLayouts_ )
{}
- VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImageDrmFormatModifierExplicitCreateInfoEXT( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ ImageDrmFormatModifierExplicitCreateInfoEXT( uint64_t drmFormatModifier_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubresourceLayout> const & planeLayouts_ )
+ : drmFormatModifier( drmFormatModifier_ ), drmFormatModifierPlaneCount( static_cast<uint32_t>( planeLayouts_.size() ) ), pPlaneLayouts( planeLayouts_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ ImageDrmFormatModifierExplicitCreateInfoEXT & operator=( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT ) - offsetof( MultisamplePropertiesEXT, pNext ) );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT const *>( &rhs );
return *this;
}
- MultisamplePropertiesEXT( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ ImageDrmFormatModifierExplicitCreateInfoEXT & operator=( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageDrmFormatModifierExplicitCreateInfoEXT ) );
+ return *this;
}
- MultisamplePropertiesEXT& operator=( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ ImageDrmFormatModifierExplicitCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT const *>(&rhs);
+ pNext = pNext_;
return *this;
}
- operator VkMultisamplePropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+ ImageDrmFormatModifierExplicitCreateInfoEXT & setDrmFormatModifier( uint64_t drmFormatModifier_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkMultisamplePropertiesEXT*>( this );
+ drmFormatModifier = drmFormatModifier_;
+ return *this;
}
- operator VkMultisamplePropertiesEXT &() VULKAN_HPP_NOEXCEPT
+ ImageDrmFormatModifierExplicitCreateInfoEXT & setDrmFormatModifierPlaneCount( uint32_t drmFormatModifierPlaneCount_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkMultisamplePropertiesEXT*>( this );
+ drmFormatModifierPlaneCount = drmFormatModifierPlaneCount_;
+ return *this;
}
- bool operator==( MultisamplePropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ ImageDrmFormatModifierExplicitCreateInfoEXT & setPPlaneLayouts( const VULKAN_HPP_NAMESPACE::SubresourceLayout* pPlaneLayouts_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pPlaneLayouts = pPlaneLayouts_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ ImageDrmFormatModifierExplicitCreateInfoEXT & setPlaneLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubresourceLayout> const & planeLayouts_ ) VULKAN_HPP_NOEXCEPT
+ {
+ drmFormatModifierPlaneCount = static_cast<uint32_t>( planeLayouts_.size() );
+ pPlaneLayouts = planeLayouts_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+ operator VkImageDrmFormatModifierExplicitCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkImageDrmFormatModifierExplicitCreateInfoEXT*>( this );
+ }
+
+ operator VkImageDrmFormatModifierExplicitCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkImageDrmFormatModifierExplicitCreateInfoEXT*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ImageDrmFormatModifierExplicitCreateInfoEXT const& ) const = default;
+#else
+ bool operator==( ImageDrmFormatModifierExplicitCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize );
+ && ( drmFormatModifier == rhs.drmFormatModifier )
+ && ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount )
+ && ( pPlaneLayouts == rhs.pPlaneLayouts );
}
- bool operator!=( MultisamplePropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( ImageDrmFormatModifierExplicitCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMultisamplePropertiesEXT;
- void* pNext = {};
- VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize = {};
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT;
+ const void* pNext = {};
+ uint64_t drmFormatModifier = {};
+ uint32_t drmFormatModifierPlaneCount = {};
+ const VULKAN_HPP_NAMESPACE::SubresourceLayout* pPlaneLayouts = {};
+
+ };
+ static_assert( sizeof( ImageDrmFormatModifierExplicitCreateInfoEXT ) == sizeof( VkImageDrmFormatModifierExplicitCreateInfoEXT ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ImageDrmFormatModifierExplicitCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT>
+ {
+ using Type = ImageDrmFormatModifierExplicitCreateInfoEXT;
};
- static_assert( sizeof( MultisamplePropertiesEXT ) == sizeof( VkMultisamplePropertiesEXT ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<MultisamplePropertiesEXT>::value, "struct wrapper is not a standard layout!" );
- struct ObjectTableCreateInfoNVX
+ struct ImageDrmFormatModifierListCreateInfoEXT
{
- VULKAN_HPP_CONSTEXPR ObjectTableCreateInfoNVX( uint32_t objectCount_ = {},
- const VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX* pObjectEntryTypes_ = {},
- const uint32_t* pObjectEntryCounts_ = {},
- const VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags_ = {},
- uint32_t maxUniformBuffersPerDescriptor_ = {},
- uint32_t maxStorageBuffersPerDescriptor_ = {},
- uint32_t maxStorageImagesPerDescriptor_ = {},
- uint32_t maxSampledImagesPerDescriptor_ = {},
- uint32_t maxPipelineLayouts_ = {} ) VULKAN_HPP_NOEXCEPT
- : objectCount( objectCount_ )
- , pObjectEntryTypes( pObjectEntryTypes_ )
- , pObjectEntryCounts( pObjectEntryCounts_ )
- , pObjectEntryUsageFlags( pObjectEntryUsageFlags_ )
- , maxUniformBuffersPerDescriptor( maxUniformBuffersPerDescriptor_ )
- , maxStorageBuffersPerDescriptor( maxStorageBuffersPerDescriptor_ )
- , maxStorageImagesPerDescriptor( maxStorageImagesPerDescriptor_ )
- , maxSampledImagesPerDescriptor( maxSampledImagesPerDescriptor_ )
- , maxPipelineLayouts( maxPipelineLayouts_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageDrmFormatModifierListCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierListCreateInfoEXT(uint32_t drmFormatModifierCount_ = {}, const uint64_t* pDrmFormatModifiers_ = {}) VULKAN_HPP_NOEXCEPT
+ : drmFormatModifierCount( drmFormatModifierCount_ ), pDrmFormatModifiers( pDrmFormatModifiers_ )
{}
- VULKAN_HPP_NAMESPACE::ObjectTableCreateInfoNVX & operator=( VULKAN_HPP_NAMESPACE::ObjectTableCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierListCreateInfoEXT( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImageDrmFormatModifierListCreateInfoEXT( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ObjectTableCreateInfoNVX ) - offsetof( ObjectTableCreateInfoNVX, pNext ) );
- return *this;
+ *this = rhs;
}
- ObjectTableCreateInfoNVX( VkObjectTableCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ ImageDrmFormatModifierListCreateInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & drmFormatModifiers_ )
+ : drmFormatModifierCount( static_cast<uint32_t>( drmFormatModifiers_.size() ) ), pDrmFormatModifiers( drmFormatModifiers_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ ImageDrmFormatModifierListCreateInfoEXT & operator=( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT const *>( &rhs );
+ return *this;
}
- ObjectTableCreateInfoNVX& operator=( VkObjectTableCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+ ImageDrmFormatModifierListCreateInfoEXT & operator=( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ObjectTableCreateInfoNVX const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageDrmFormatModifierListCreateInfoEXT ) );
return *this;
}
- ObjectTableCreateInfoNVX & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ ImageDrmFormatModifierListCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- ObjectTableCreateInfoNVX & setObjectCount( uint32_t objectCount_ ) VULKAN_HPP_NOEXCEPT
+ ImageDrmFormatModifierListCreateInfoEXT & setDrmFormatModifierCount( uint32_t drmFormatModifierCount_ ) VULKAN_HPP_NOEXCEPT
{
- objectCount = objectCount_;
+ drmFormatModifierCount = drmFormatModifierCount_;
return *this;
}
- ObjectTableCreateInfoNVX & setPObjectEntryTypes( const VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX* pObjectEntryTypes_ ) VULKAN_HPP_NOEXCEPT
+ ImageDrmFormatModifierListCreateInfoEXT & setPDrmFormatModifiers( const uint64_t* pDrmFormatModifiers_ ) VULKAN_HPP_NOEXCEPT
{
- pObjectEntryTypes = pObjectEntryTypes_;
+ pDrmFormatModifiers = pDrmFormatModifiers_;
return *this;
}
- ObjectTableCreateInfoNVX & setPObjectEntryCounts( const uint32_t* pObjectEntryCounts_ ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ ImageDrmFormatModifierListCreateInfoEXT & setDrmFormatModifiers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & drmFormatModifiers_ ) VULKAN_HPP_NOEXCEPT
{
- pObjectEntryCounts = pObjectEntryCounts_;
+ drmFormatModifierCount = static_cast<uint32_t>( drmFormatModifiers_.size() );
+ pDrmFormatModifiers = drmFormatModifiers_.data();
return *this;
}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
- ObjectTableCreateInfoNVX & setPObjectEntryUsageFlags( const VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags_ ) VULKAN_HPP_NOEXCEPT
+ operator VkImageDrmFormatModifierListCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkImageDrmFormatModifierListCreateInfoEXT*>( this );
+ }
+
+ operator VkImageDrmFormatModifierListCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkImageDrmFormatModifierListCreateInfoEXT*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ImageDrmFormatModifierListCreateInfoEXT const& ) const = default;
+#else
+ bool operator==( ImageDrmFormatModifierListCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( drmFormatModifierCount == rhs.drmFormatModifierCount )
+ && ( pDrmFormatModifiers == rhs.pDrmFormatModifiers );
+ }
+
+ bool operator!=( ImageDrmFormatModifierListCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierListCreateInfoEXT;
+ const void* pNext = {};
+ uint32_t drmFormatModifierCount = {};
+ const uint64_t* pDrmFormatModifiers = {};
+
+ };
+ static_assert( sizeof( ImageDrmFormatModifierListCreateInfoEXT ) == sizeof( VkImageDrmFormatModifierListCreateInfoEXT ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ImageDrmFormatModifierListCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eImageDrmFormatModifierListCreateInfoEXT>
+ {
+ using Type = ImageDrmFormatModifierListCreateInfoEXT;
+ };
+
+ struct ImageFormatListCreateInfo
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageFormatListCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ImageFormatListCreateInfo(uint32_t viewFormatCount_ = {}, const VULKAN_HPP_NAMESPACE::Format* pViewFormats_ = {}) VULKAN_HPP_NOEXCEPT
+ : viewFormatCount( viewFormatCount_ ), pViewFormats( pViewFormats_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR ImageFormatListCreateInfo( ImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImageFormatListCreateInfo( VkImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- pObjectEntryUsageFlags = pObjectEntryUsageFlags_;
+ *this = rhs;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ ImageFormatListCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & viewFormats_ )
+ : viewFormatCount( static_cast<uint32_t>( viewFormats_.size() ) ), pViewFormats( viewFormats_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ ImageFormatListCreateInfo & operator=( VkImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo const *>( &rhs );
return *this;
}
- ObjectTableCreateInfoNVX & setMaxUniformBuffersPerDescriptor( uint32_t maxUniformBuffersPerDescriptor_ ) VULKAN_HPP_NOEXCEPT
+ ImageFormatListCreateInfo & operator=( ImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- maxUniformBuffersPerDescriptor = maxUniformBuffersPerDescriptor_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageFormatListCreateInfo ) );
return *this;
}
- ObjectTableCreateInfoNVX & setMaxStorageBuffersPerDescriptor( uint32_t maxStorageBuffersPerDescriptor_ ) VULKAN_HPP_NOEXCEPT
+ ImageFormatListCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- maxStorageBuffersPerDescriptor = maxStorageBuffersPerDescriptor_;
+ pNext = pNext_;
return *this;
}
- ObjectTableCreateInfoNVX & setMaxStorageImagesPerDescriptor( uint32_t maxStorageImagesPerDescriptor_ ) VULKAN_HPP_NOEXCEPT
+ ImageFormatListCreateInfo & setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT
{
- maxStorageImagesPerDescriptor = maxStorageImagesPerDescriptor_;
+ viewFormatCount = viewFormatCount_;
return *this;
}
- ObjectTableCreateInfoNVX & setMaxSampledImagesPerDescriptor( uint32_t maxSampledImagesPerDescriptor_ ) VULKAN_HPP_NOEXCEPT
+ ImageFormatListCreateInfo & setPViewFormats( const VULKAN_HPP_NAMESPACE::Format* pViewFormats_ ) VULKAN_HPP_NOEXCEPT
{
- maxSampledImagesPerDescriptor = maxSampledImagesPerDescriptor_;
+ pViewFormats = pViewFormats_;
return *this;
}
- ObjectTableCreateInfoNVX & setMaxPipelineLayouts( uint32_t maxPipelineLayouts_ ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ ImageFormatListCreateInfo & setViewFormats( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & viewFormats_ ) VULKAN_HPP_NOEXCEPT
{
- maxPipelineLayouts = maxPipelineLayouts_;
+ viewFormatCount = static_cast<uint32_t>( viewFormats_.size() );
+ pViewFormats = viewFormats_.data();
return *this;
}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
- operator VkObjectTableCreateInfoNVX const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkImageFormatListCreateInfo const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkObjectTableCreateInfoNVX*>( this );
+ return *reinterpret_cast<const VkImageFormatListCreateInfo*>( this );
}
- operator VkObjectTableCreateInfoNVX &() VULKAN_HPP_NOEXCEPT
+ operator VkImageFormatListCreateInfo &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkObjectTableCreateInfoNVX*>( this );
+ return *reinterpret_cast<VkImageFormatListCreateInfo*>( this );
}
- bool operator==( ObjectTableCreateInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ImageFormatListCreateInfo const& ) const = default;
+#else
+ bool operator==( ImageFormatListCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( objectCount == rhs.objectCount )
- && ( pObjectEntryTypes == rhs.pObjectEntryTypes )
- && ( pObjectEntryCounts == rhs.pObjectEntryCounts )
- && ( pObjectEntryUsageFlags == rhs.pObjectEntryUsageFlags )
- && ( maxUniformBuffersPerDescriptor == rhs.maxUniformBuffersPerDescriptor )
- && ( maxStorageBuffersPerDescriptor == rhs.maxStorageBuffersPerDescriptor )
- && ( maxStorageImagesPerDescriptor == rhs.maxStorageImagesPerDescriptor )
- && ( maxSampledImagesPerDescriptor == rhs.maxSampledImagesPerDescriptor )
- && ( maxPipelineLayouts == rhs.maxPipelineLayouts );
+ && ( viewFormatCount == rhs.viewFormatCount )
+ && ( pViewFormats == rhs.pViewFormats );
}
- bool operator!=( ObjectTableCreateInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( ImageFormatListCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eObjectTableCreateInfoNVX;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatListCreateInfo;
const void* pNext = {};
- uint32_t objectCount = {};
- const VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX* pObjectEntryTypes = {};
- const uint32_t* pObjectEntryCounts = {};
- const VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags = {};
- uint32_t maxUniformBuffersPerDescriptor = {};
- uint32_t maxStorageBuffersPerDescriptor = {};
- uint32_t maxStorageImagesPerDescriptor = {};
- uint32_t maxSampledImagesPerDescriptor = {};
- uint32_t maxPipelineLayouts = {};
+ uint32_t viewFormatCount = {};
+ const VULKAN_HPP_NAMESPACE::Format* pViewFormats = {};
+
};
- static_assert( sizeof( ObjectTableCreateInfoNVX ) == sizeof( VkObjectTableCreateInfoNVX ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ObjectTableCreateInfoNVX>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( ImageFormatListCreateInfo ) == sizeof( VkImageFormatListCreateInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ImageFormatListCreateInfo>::value, "struct wrapper is not a standard layout!" );
- struct ObjectTableEntryNVX
+ template <>
+ struct CppType<StructureType, StructureType::eImageFormatListCreateInfo>
{
- VULKAN_HPP_CONSTEXPR ObjectTableEntryNVX( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet,
- VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ = {} ) VULKAN_HPP_NOEXCEPT
- : type( type_ )
- , flags( flags_ )
+ using Type = ImageFormatListCreateInfo;
+ };
+ using ImageFormatListCreateInfoKHR = ImageFormatListCreateInfo;
+
+#ifdef VK_USE_PLATFORM_FUCHSIA
+ struct ImagePipeSurfaceCreateInfoFUCHSIA
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImagepipeSurfaceCreateInfoFUCHSIA;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ImagePipeSurfaceCreateInfoFUCHSIA(VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ = {}, zx_handle_t imagePipeHandle_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), imagePipeHandle( imagePipeHandle_ )
{}
- ObjectTableEntryNVX( VkObjectTableEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ImagePipeSurfaceCreateInfoFUCHSIA( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImagePipeSurfaceCreateInfoFUCHSIA( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ObjectTableEntryNVX& operator=( VkObjectTableEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+ ImagePipeSurfaceCreateInfoFUCHSIA & operator=( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ObjectTableEntryNVX const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const *>( &rhs );
return *this;
}
- ObjectTableEntryNVX & setType( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ ) VULKAN_HPP_NOEXCEPT
+ ImagePipeSurfaceCreateInfoFUCHSIA & operator=( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
{
- type = type_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ImagePipeSurfaceCreateInfoFUCHSIA ) );
return *this;
}
- ObjectTableEntryNVX & setFlags( VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT
+ ImagePipeSurfaceCreateInfoFUCHSIA & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ ImagePipeSurfaceCreateInfoFUCHSIA & setFlags( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- operator VkObjectTableEntryNVX const&() const VULKAN_HPP_NOEXCEPT
+ ImagePipeSurfaceCreateInfoFUCHSIA & setImagePipeHandle( zx_handle_t imagePipeHandle_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkObjectTableEntryNVX*>( this );
+ imagePipeHandle = imagePipeHandle_;
+ return *this;
}
- operator VkObjectTableEntryNVX &() VULKAN_HPP_NOEXCEPT
+
+ operator VkImagePipeSurfaceCreateInfoFUCHSIA const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkObjectTableEntryNVX*>( this );
+ return *reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA*>( this );
}
- bool operator==( ObjectTableEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+ operator VkImagePipeSurfaceCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
{
- return ( type == rhs.type )
- && ( flags == rhs.flags );
+ return *reinterpret_cast<VkImagePipeSurfaceCreateInfoFUCHSIA*>( this );
}
- bool operator!=( ObjectTableEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ImagePipeSurfaceCreateInfoFUCHSIA const& ) const = default;
+#else
+ bool operator==( ImagePipeSurfaceCreateInfoFUCHSIA const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( memcmp( &imagePipeHandle, &rhs.imagePipeHandle, sizeof( zx_handle_t ) ) == 0 );
+ }
+
+ bool operator!=( ImagePipeSurfaceCreateInfoFUCHSIA const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet;
- VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags = {};
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImagepipeSurfaceCreateInfoFUCHSIA;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags = {};
+ zx_handle_t imagePipeHandle = {};
+
};
- static_assert( sizeof( ObjectTableEntryNVX ) == sizeof( VkObjectTableEntryNVX ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ObjectTableEntryNVX>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( ImagePipeSurfaceCreateInfoFUCHSIA ) == sizeof( VkImagePipeSurfaceCreateInfoFUCHSIA ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ImagePipeSurfaceCreateInfoFUCHSIA>::value, "struct wrapper is not a standard layout!" );
- struct ObjectTableDescriptorSetEntryNVX
+ template <>
+ struct CppType<StructureType, StructureType::eImagepipeSurfaceCreateInfoFUCHSIA>
{
- VULKAN_HPP_CONSTEXPR ObjectTableDescriptorSetEntryNVX( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet,
- VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ = {},
- VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {},
- VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet_ = {} ) VULKAN_HPP_NOEXCEPT
- : type( type_ )
- , flags( flags_ )
- , pipelineLayout( pipelineLayout_ )
- , descriptorSet( descriptorSet_ )
- {}
+ using Type = ImagePipeSurfaceCreateInfoFUCHSIA;
+ };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
- explicit ObjectTableDescriptorSetEntryNVX( ObjectTableEntryNVX const& objectTableEntryNVX,
- VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {},
- VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet_ = {} )
- : type( objectTableEntryNVX.type )
- , flags( objectTableEntryNVX.flags )
- , pipelineLayout( pipelineLayout_ )
- , descriptorSet( descriptorSet_ )
+ struct ImagePlaneMemoryRequirementsInfo
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImagePlaneMemoryRequirementsInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ImagePlaneMemoryRequirementsInfo(VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor) VULKAN_HPP_NOEXCEPT
+ : planeAspect( planeAspect_ )
{}
- ObjectTableDescriptorSetEntryNVX( VkObjectTableDescriptorSetEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ImagePlaneMemoryRequirementsInfo( ImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImagePlaneMemoryRequirementsInfo( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ObjectTableDescriptorSetEntryNVX& operator=( VkObjectTableDescriptorSetEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+ ImagePlaneMemoryRequirementsInfo & operator=( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ObjectTableDescriptorSetEntryNVX const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo const *>( &rhs );
return *this;
}
- ObjectTableDescriptorSetEntryNVX & setType( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ ) VULKAN_HPP_NOEXCEPT
+ ImagePlaneMemoryRequirementsInfo & operator=( ImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- type = type_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ImagePlaneMemoryRequirementsInfo ) );
return *this;
}
- ObjectTableDescriptorSetEntryNVX & setFlags( VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT
+ ImagePlaneMemoryRequirementsInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- flags = flags_;
+ pNext = pNext_;
return *this;
}
- ObjectTableDescriptorSetEntryNVX & setPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ ) VULKAN_HPP_NOEXCEPT
+ ImagePlaneMemoryRequirementsInfo & setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT
{
- pipelineLayout = pipelineLayout_;
+ planeAspect = planeAspect_;
return *this;
}
- ObjectTableDescriptorSetEntryNVX & setDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet_ ) VULKAN_HPP_NOEXCEPT
- {
- descriptorSet = descriptorSet_;
- return *this;
- }
- operator VkObjectTableDescriptorSetEntryNVX const&() const VULKAN_HPP_NOEXCEPT
+ operator VkImagePlaneMemoryRequirementsInfo const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkObjectTableDescriptorSetEntryNVX*>( this );
+ return *reinterpret_cast<const VkImagePlaneMemoryRequirementsInfo*>( this );
}
- operator VkObjectTableDescriptorSetEntryNVX &() VULKAN_HPP_NOEXCEPT
+ operator VkImagePlaneMemoryRequirementsInfo &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkObjectTableDescriptorSetEntryNVX*>( this );
+ return *reinterpret_cast<VkImagePlaneMemoryRequirementsInfo*>( this );
}
- bool operator==( ObjectTableDescriptorSetEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ImagePlaneMemoryRequirementsInfo const& ) const = default;
+#else
+ bool operator==( ImagePlaneMemoryRequirementsInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- return ( type == rhs.type )
- && ( flags == rhs.flags )
- && ( pipelineLayout == rhs.pipelineLayout )
- && ( descriptorSet == rhs.descriptorSet );
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( planeAspect == rhs.planeAspect );
}
- bool operator!=( ObjectTableDescriptorSetEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( ImagePlaneMemoryRequirementsInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet;
- VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags = {};
- VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout = {};
- VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet = {};
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImagePlaneMemoryRequirementsInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor;
+
};
- static_assert( sizeof( ObjectTableDescriptorSetEntryNVX ) == sizeof( VkObjectTableDescriptorSetEntryNVX ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ObjectTableDescriptorSetEntryNVX>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( ImagePlaneMemoryRequirementsInfo ) == sizeof( VkImagePlaneMemoryRequirementsInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ImagePlaneMemoryRequirementsInfo>::value, "struct wrapper is not a standard layout!" );
- struct ObjectTableIndexBufferEntryNVX
+ template <>
+ struct CppType<StructureType, StructureType::eImagePlaneMemoryRequirementsInfo>
{
- VULKAN_HPP_CONSTEXPR ObjectTableIndexBufferEntryNVX( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet,
- VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ = {},
- VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
- VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16 ) VULKAN_HPP_NOEXCEPT
- : type( type_ )
- , flags( flags_ )
- , buffer( buffer_ )
- , indexType( indexType_ )
- {}
+ using Type = ImagePlaneMemoryRequirementsInfo;
+ };
+ using ImagePlaneMemoryRequirementsInfoKHR = ImagePlaneMemoryRequirementsInfo;
- explicit ObjectTableIndexBufferEntryNVX( ObjectTableEntryNVX const& objectTableEntryNVX,
- VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
- VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16 )
- : type( objectTableEntryNVX.type )
- , flags( objectTableEntryNVX.flags )
- , buffer( buffer_ )
- , indexType( indexType_ )
+ struct ImageStencilUsageCreateInfo
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageStencilUsageCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ImageStencilUsageCreateInfo(VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ = {}) VULKAN_HPP_NOEXCEPT
+ : stencilUsage( stencilUsage_ )
{}
- ObjectTableIndexBufferEntryNVX( VkObjectTableIndexBufferEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ImageStencilUsageCreateInfo( ImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImageStencilUsageCreateInfo( VkImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ObjectTableIndexBufferEntryNVX& operator=( VkObjectTableIndexBufferEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+ ImageStencilUsageCreateInfo & operator=( VkImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ObjectTableIndexBufferEntryNVX const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo const *>( &rhs );
return *this;
}
- ObjectTableIndexBufferEntryNVX & setType( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ ) VULKAN_HPP_NOEXCEPT
+ ImageStencilUsageCreateInfo & operator=( ImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- type = type_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageStencilUsageCreateInfo ) );
return *this;
}
- ObjectTableIndexBufferEntryNVX & setFlags( VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT
+ ImageStencilUsageCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- flags = flags_;
+ pNext = pNext_;
return *this;
}
- ObjectTableIndexBufferEntryNVX & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+ ImageStencilUsageCreateInfo & setStencilUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ ) VULKAN_HPP_NOEXCEPT
{
- buffer = buffer_;
+ stencilUsage = stencilUsage_;
return *this;
}
- ObjectTableIndexBufferEntryNVX & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
- {
- indexType = indexType_;
- return *this;
- }
- operator VkObjectTableIndexBufferEntryNVX const&() const VULKAN_HPP_NOEXCEPT
+ operator VkImageStencilUsageCreateInfo const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkObjectTableIndexBufferEntryNVX*>( this );
+ return *reinterpret_cast<const VkImageStencilUsageCreateInfo*>( this );
}
- operator VkObjectTableIndexBufferEntryNVX &() VULKAN_HPP_NOEXCEPT
+ operator VkImageStencilUsageCreateInfo &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkObjectTableIndexBufferEntryNVX*>( this );
+ return *reinterpret_cast<VkImageStencilUsageCreateInfo*>( this );
}
- bool operator==( ObjectTableIndexBufferEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ImageStencilUsageCreateInfo const& ) const = default;
+#else
+ bool operator==( ImageStencilUsageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- return ( type == rhs.type )
- && ( flags == rhs.flags )
- && ( buffer == rhs.buffer )
- && ( indexType == rhs.indexType );
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( stencilUsage == rhs.stencilUsage );
}
- bool operator!=( ObjectTableIndexBufferEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( ImageStencilUsageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet;
- VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags = {};
- VULKAN_HPP_NAMESPACE::Buffer buffer = {};
- VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageStencilUsageCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage = {};
+
};
- static_assert( sizeof( ObjectTableIndexBufferEntryNVX ) == sizeof( VkObjectTableIndexBufferEntryNVX ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ObjectTableIndexBufferEntryNVX>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( ImageStencilUsageCreateInfo ) == sizeof( VkImageStencilUsageCreateInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ImageStencilUsageCreateInfo>::value, "struct wrapper is not a standard layout!" );
- struct ObjectTablePipelineEntryNVX
+ template <>
+ struct CppType<StructureType, StructureType::eImageStencilUsageCreateInfo>
{
- VULKAN_HPP_CONSTEXPR ObjectTablePipelineEntryNVX( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet,
- VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ = {},
- VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {} ) VULKAN_HPP_NOEXCEPT
- : type( type_ )
- , flags( flags_ )
- , pipeline( pipeline_ )
- {}
+ using Type = ImageStencilUsageCreateInfo;
+ };
+ using ImageStencilUsageCreateInfoEXT = ImageStencilUsageCreateInfo;
+
+ struct ImageSwapchainCreateInfoKHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageSwapchainCreateInfoKHR;
- explicit ObjectTablePipelineEntryNVX( ObjectTableEntryNVX const& objectTableEntryNVX,
- VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {} )
- : type( objectTableEntryNVX.type )
- , flags( objectTableEntryNVX.flags )
- , pipeline( pipeline_ )
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ImageSwapchainCreateInfoKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}) VULKAN_HPP_NOEXCEPT
+ : swapchain( swapchain_ )
{}
- ObjectTablePipelineEntryNVX( VkObjectTablePipelineEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ImageSwapchainCreateInfoKHR( ImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImageSwapchainCreateInfoKHR( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ObjectTablePipelineEntryNVX& operator=( VkObjectTablePipelineEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+ ImageSwapchainCreateInfoKHR & operator=( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ObjectTablePipelineEntryNVX const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR const *>( &rhs );
return *this;
}
- ObjectTablePipelineEntryNVX & setType( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ ) VULKAN_HPP_NOEXCEPT
+ ImageSwapchainCreateInfoKHR & operator=( ImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- type = type_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageSwapchainCreateInfoKHR ) );
return *this;
}
- ObjectTablePipelineEntryNVX & setFlags( VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT
+ ImageSwapchainCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- flags = flags_;
+ pNext = pNext_;
return *this;
}
- ObjectTablePipelineEntryNVX & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
+ ImageSwapchainCreateInfoKHR & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT
{
- pipeline = pipeline_;
+ swapchain = swapchain_;
return *this;
}
- operator VkObjectTablePipelineEntryNVX const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkImageSwapchainCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkObjectTablePipelineEntryNVX*>( this );
+ return *reinterpret_cast<const VkImageSwapchainCreateInfoKHR*>( this );
}
- operator VkObjectTablePipelineEntryNVX &() VULKAN_HPP_NOEXCEPT
+ operator VkImageSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkObjectTablePipelineEntryNVX*>( this );
+ return *reinterpret_cast<VkImageSwapchainCreateInfoKHR*>( this );
}
- bool operator==( ObjectTablePipelineEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ImageSwapchainCreateInfoKHR const& ) const = default;
+#else
+ bool operator==( ImageSwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- return ( type == rhs.type )
- && ( flags == rhs.flags )
- && ( pipeline == rhs.pipeline );
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( swapchain == rhs.swapchain );
}
- bool operator!=( ObjectTablePipelineEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( ImageSwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet;
- VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags = {};
- VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSwapchainCreateInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {};
+
+ };
+ static_assert( sizeof( ImageSwapchainCreateInfoKHR ) == sizeof( VkImageSwapchainCreateInfoKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ImageSwapchainCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eImageSwapchainCreateInfoKHR>
+ {
+ using Type = ImageSwapchainCreateInfoKHR;
};
- static_assert( sizeof( ObjectTablePipelineEntryNVX ) == sizeof( VkObjectTablePipelineEntryNVX ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ObjectTablePipelineEntryNVX>::value, "struct wrapper is not a standard layout!" );
- struct ObjectTablePushConstantEntryNVX
+ struct ImageViewASTCDecodeModeEXT
{
- VULKAN_HPP_CONSTEXPR ObjectTablePushConstantEntryNVX( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet,
- VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ = {},
- VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {},
- VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {} ) VULKAN_HPP_NOEXCEPT
- : type( type_ )
- , flags( flags_ )
- , pipelineLayout( pipelineLayout_ )
- , stageFlags( stageFlags_ )
- {}
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewAstcDecodeModeEXT;
- explicit ObjectTablePushConstantEntryNVX( ObjectTableEntryNVX const& objectTableEntryNVX,
- VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {},
- VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {} )
- : type( objectTableEntryNVX.type )
- , flags( objectTableEntryNVX.flags )
- , pipelineLayout( pipelineLayout_ )
- , stageFlags( stageFlags_ )
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ImageViewASTCDecodeModeEXT(VULKAN_HPP_NAMESPACE::Format decodeMode_ = VULKAN_HPP_NAMESPACE::Format::eUndefined) VULKAN_HPP_NOEXCEPT
+ : decodeMode( decodeMode_ )
{}
- ObjectTablePushConstantEntryNVX( VkObjectTablePushConstantEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ImageViewASTCDecodeModeEXT( ImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImageViewASTCDecodeModeEXT( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ObjectTablePushConstantEntryNVX& operator=( VkObjectTablePushConstantEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+ ImageViewASTCDecodeModeEXT & operator=( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ObjectTablePushConstantEntryNVX const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT const *>( &rhs );
return *this;
}
- ObjectTablePushConstantEntryNVX & setType( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ ) VULKAN_HPP_NOEXCEPT
+ ImageViewASTCDecodeModeEXT & operator=( ImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- type = type_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageViewASTCDecodeModeEXT ) );
return *this;
}
- ObjectTablePushConstantEntryNVX & setFlags( VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT
+ ImageViewASTCDecodeModeEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- flags = flags_;
+ pNext = pNext_;
return *this;
}
- ObjectTablePushConstantEntryNVX & setPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ ) VULKAN_HPP_NOEXCEPT
+ ImageViewASTCDecodeModeEXT & setDecodeMode( VULKAN_HPP_NAMESPACE::Format decodeMode_ ) VULKAN_HPP_NOEXCEPT
{
- pipelineLayout = pipelineLayout_;
+ decodeMode = decodeMode_;
return *this;
}
- ObjectTablePushConstantEntryNVX & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
+
+ operator VkImageViewASTCDecodeModeEXT const&() const VULKAN_HPP_NOEXCEPT
{
- stageFlags = stageFlags_;
+ return *reinterpret_cast<const VkImageViewASTCDecodeModeEXT*>( this );
+ }
+
+ operator VkImageViewASTCDecodeModeEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkImageViewASTCDecodeModeEXT*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ImageViewASTCDecodeModeEXT const& ) const = default;
+#else
+ bool operator==( ImageViewASTCDecodeModeEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( decodeMode == rhs.decodeMode );
+ }
+
+ bool operator!=( ImageViewASTCDecodeModeEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewAstcDecodeModeEXT;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Format decodeMode = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+
+ };
+ static_assert( sizeof( ImageViewASTCDecodeModeEXT ) == sizeof( VkImageViewASTCDecodeModeEXT ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ImageViewASTCDecodeModeEXT>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eImageViewAstcDecodeModeEXT>
+ {
+ using Type = ImageViewASTCDecodeModeEXT;
+ };
+
+ struct ImageViewUsageCreateInfo
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewUsageCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ImageViewUsageCreateInfo(VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}) VULKAN_HPP_NOEXCEPT
+ : usage( usage_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR ImageViewUsageCreateInfo( ImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImageViewUsageCreateInfo( VkImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ ImageViewUsageCreateInfo & operator=( VkImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo const *>( &rhs );
return *this;
}
- operator VkObjectTablePushConstantEntryNVX const&() const VULKAN_HPP_NOEXCEPT
+ ImageViewUsageCreateInfo & operator=( ImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkObjectTablePushConstantEntryNVX*>( this );
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageViewUsageCreateInfo ) );
+ return *this;
}
- operator VkObjectTablePushConstantEntryNVX &() VULKAN_HPP_NOEXCEPT
+ ImageViewUsageCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkObjectTablePushConstantEntryNVX*>( this );
+ pNext = pNext_;
+ return *this;
}
- bool operator==( ObjectTablePushConstantEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+ ImageViewUsageCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
{
- return ( type == rhs.type )
- && ( flags == rhs.flags )
- && ( pipelineLayout == rhs.pipelineLayout )
- && ( stageFlags == rhs.stageFlags );
+ usage = usage_;
+ return *this;
+ }
+
+
+ operator VkImageViewUsageCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkImageViewUsageCreateInfo*>( this );
+ }
+
+ operator VkImageViewUsageCreateInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkImageViewUsageCreateInfo*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ImageViewUsageCreateInfo const& ) const = default;
+#else
+ bool operator==( ImageViewUsageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( usage == rhs.usage );
}
- bool operator!=( ObjectTablePushConstantEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( ImageViewUsageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet;
- VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags = {};
- VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout = {};
- VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {};
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewUsageCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
+
};
- static_assert( sizeof( ObjectTablePushConstantEntryNVX ) == sizeof( VkObjectTablePushConstantEntryNVX ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ObjectTablePushConstantEntryNVX>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( ImageViewUsageCreateInfo ) == sizeof( VkImageViewUsageCreateInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ImageViewUsageCreateInfo>::value, "struct wrapper is not a standard layout!" );
- struct ObjectTableVertexBufferEntryNVX
+ template <>
+ struct CppType<StructureType, StructureType::eImageViewUsageCreateInfo>
{
- VULKAN_HPP_CONSTEXPR ObjectTableVertexBufferEntryNVX( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet,
- VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ = {},
- VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) VULKAN_HPP_NOEXCEPT
- : type( type_ )
- , flags( flags_ )
- , buffer( buffer_ )
- {}
+ using Type = ImageViewUsageCreateInfo;
+ };
+ using ImageViewUsageCreateInfoKHR = ImageViewUsageCreateInfo;
+
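// --- Illustrative usage sketch (annotation, not part of the diff above) ---
// A minimal sketch, assuming <vulkan/vulkan.hpp> and the default "vk" namespace, of how
// the ImageViewUsageCreateInfo wrapper added above is chained through pNext when creating
// an image view. "device" and "image" are assumed to exist in the surrounding application.
vk::ImageViewUsageCreateInfo usageInfo( vk::ImageUsageFlagBits::eSampled );
vk::ImageViewCreateInfo viewInfo( {}, image, vk::ImageViewType::e2D, vk::Format::eR8G8B8A8Unorm,
                                  {}, { vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 } );
viewInfo.setPNext( &usageInfo );                          // restrict the view to a subset of the image's usage
vk::ImageView view = device.createImageView( viewInfo );  // enhanced mode: throws on failure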
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+ struct ImportAndroidHardwareBufferInfoANDROID
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportAndroidHardwareBufferInfoANDROID;
- explicit ObjectTableVertexBufferEntryNVX( ObjectTableEntryNVX const& objectTableEntryNVX,
- VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} )
- : type( objectTableEntryNVX.type )
- , flags( objectTableEntryNVX.flags )
- , buffer( buffer_ )
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ImportAndroidHardwareBufferInfoANDROID(struct AHardwareBuffer* buffer_ = {}) VULKAN_HPP_NOEXCEPT
+ : buffer( buffer_ )
{}
- ObjectTableVertexBufferEntryNVX( VkObjectTableVertexBufferEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ImportAndroidHardwareBufferInfoANDROID( ImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImportAndroidHardwareBufferInfoANDROID( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ObjectTableVertexBufferEntryNVX& operator=( VkObjectTableVertexBufferEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+ ImportAndroidHardwareBufferInfoANDROID & operator=( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ObjectTableVertexBufferEntryNVX const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID const *>( &rhs );
return *this;
}
- ObjectTableVertexBufferEntryNVX & setType( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ ) VULKAN_HPP_NOEXCEPT
+ ImportAndroidHardwareBufferInfoANDROID & operator=( ImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
{
- type = type_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ImportAndroidHardwareBufferInfoANDROID ) );
return *this;
}
- ObjectTableVertexBufferEntryNVX & setFlags( VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT
+ ImportAndroidHardwareBufferInfoANDROID & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- flags = flags_;
+ pNext = pNext_;
return *this;
}
- ObjectTableVertexBufferEntryNVX & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+ ImportAndroidHardwareBufferInfoANDROID & setBuffer( struct AHardwareBuffer* buffer_ ) VULKAN_HPP_NOEXCEPT
{
buffer = buffer_;
return *this;
}
- operator VkObjectTableVertexBufferEntryNVX const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkImportAndroidHardwareBufferInfoANDROID const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkObjectTableVertexBufferEntryNVX*>( this );
+ return *reinterpret_cast<const VkImportAndroidHardwareBufferInfoANDROID*>( this );
}
- operator VkObjectTableVertexBufferEntryNVX &() VULKAN_HPP_NOEXCEPT
+ operator VkImportAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkObjectTableVertexBufferEntryNVX*>( this );
+ return *reinterpret_cast<VkImportAndroidHardwareBufferInfoANDROID*>( this );
}
- bool operator==( ObjectTableVertexBufferEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ImportAndroidHardwareBufferInfoANDROID const& ) const = default;
+#else
+ bool operator==( ImportAndroidHardwareBufferInfoANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- return ( type == rhs.type )
- && ( flags == rhs.flags )
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
&& ( buffer == rhs.buffer );
}
- bool operator!=( ObjectTableVertexBufferEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( ImportAndroidHardwareBufferInfoANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet;
- VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags = {};
- VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportAndroidHardwareBufferInfoANDROID;
+ const void* pNext = {};
+ struct AHardwareBuffer* buffer = {};
+
};
- static_assert( sizeof( ObjectTableVertexBufferEntryNVX ) == sizeof( VkObjectTableVertexBufferEntryNVX ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ObjectTableVertexBufferEntryNVX>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( ImportAndroidHardwareBufferInfoANDROID ) == sizeof( VkImportAndroidHardwareBufferInfoANDROID ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ImportAndroidHardwareBufferInfoANDROID>::value, "struct wrapper is not a standard layout!" );
- struct PastPresentationTimingGOOGLE
+ template <>
+ struct CppType<StructureType, StructureType::eImportAndroidHardwareBufferInfoANDROID>
+ {
+ using Type = ImportAndroidHardwareBufferInfoANDROID;
+ };
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+ struct ImportMemoryFdInfoKHR
{
- PastPresentationTimingGOOGLE( uint32_t presentID_ = {},
- uint64_t desiredPresentTime_ = {},
- uint64_t actualPresentTime_ = {},
- uint64_t earliestPresentTime_ = {},
- uint64_t presentMargin_ = {} ) VULKAN_HPP_NOEXCEPT
- : presentID( presentID_ )
- , desiredPresentTime( desiredPresentTime_ )
- , actualPresentTime( actualPresentTime_ )
- , earliestPresentTime( earliestPresentTime_ )
- , presentMargin( presentMargin_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryFdInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, int fd_ = {}) VULKAN_HPP_NOEXCEPT
+ : handleType( handleType_ ), fd( fd_ )
{}
- PastPresentationTimingGOOGLE( VkPastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR( ImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImportMemoryFdInfoKHR( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PastPresentationTimingGOOGLE& operator=( VkPastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+ ImportMemoryFdInfoKHR & operator=( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR const *>( &rhs );
return *this;
}
- operator VkPastPresentationTimingGOOGLE const&() const VULKAN_HPP_NOEXCEPT
+ ImportMemoryFdInfoKHR & operator=( ImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPastPresentationTimingGOOGLE*>( this );
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ImportMemoryFdInfoKHR ) );
+ return *this;
}
- operator VkPastPresentationTimingGOOGLE &() VULKAN_HPP_NOEXCEPT
+ ImportMemoryFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPastPresentationTimingGOOGLE*>( this );
+ pNext = pNext_;
+ return *this;
}
- bool operator==( PastPresentationTimingGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT
+ ImportMemoryFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
- return ( presentID == rhs.presentID )
- && ( desiredPresentTime == rhs.desiredPresentTime )
- && ( actualPresentTime == rhs.actualPresentTime )
- && ( earliestPresentTime == rhs.earliestPresentTime )
- && ( presentMargin == rhs.presentMargin );
+ handleType = handleType_;
+ return *this;
}
- bool operator!=( PastPresentationTimingGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT
+ ImportMemoryFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT
+ {
+ fd = fd_;
+ return *this;
+ }
+
+
+ operator VkImportMemoryFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkImportMemoryFdInfoKHR*>( this );
+ }
+
+ operator VkImportMemoryFdInfoKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkImportMemoryFdInfoKHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ImportMemoryFdInfoKHR const& ) const = default;
+#else
+ bool operator==( ImportMemoryFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( handleType == rhs.handleType )
+ && ( fd == rhs.fd );
+ }
+
+ bool operator!=( ImportMemoryFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- uint32_t presentID = {};
- uint64_t desiredPresentTime = {};
- uint64_t actualPresentTime = {};
- uint64_t earliestPresentTime = {};
- uint64_t presentMargin = {};
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryFdInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
+ int fd = {};
+
};
- static_assert( sizeof( PastPresentationTimingGOOGLE ) == sizeof( VkPastPresentationTimingGOOGLE ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PastPresentationTimingGOOGLE>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( ImportMemoryFdInfoKHR ) == sizeof( VkImportMemoryFdInfoKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ImportMemoryFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
- struct PerformanceConfigurationAcquireInfoINTEL
+ template <>
+ struct CppType<StructureType, StructureType::eImportMemoryFdInfoKHR>
+ {
+ using Type = ImportMemoryFdInfoKHR;
+ };
+
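// --- Illustrative usage sketch (annotation, not part of the diff above) ---
// A minimal sketch of importing an opaque POSIX fd with the ImportMemoryFdInfoKHR wrapper
// added above. "device", "fd", "allocationSize" and "memoryTypeIndex" are assumed to be
// provided by the application, and VK_KHR_external_memory_fd must be enabled on the device.
vk::ImportMemoryFdInfoKHR importInfo( vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, fd );
vk::MemoryAllocateInfo    allocInfo( allocationSize, memoryTypeIndex );
allocInfo.setPNext( &importInfo );                              // chain the import info into the allocation
vk::DeviceMemory memory = device.allocateMemory( allocInfo );   // ownership of the fd passes to Vulkan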
+ struct ImportMemoryHostPointerInfoEXT
{
- VULKAN_HPP_CONSTEXPR PerformanceConfigurationAcquireInfoINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated ) VULKAN_HPP_NOEXCEPT
- : type( type_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryHostPointerInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ImportMemoryHostPointerInfoEXT(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, void* pHostPointer_ = {}) VULKAN_HPP_NOEXCEPT
+ : handleType( handleType_ ), pHostPointer( pHostPointer_ )
{}
- VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & operator=( VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ImportMemoryHostPointerInfoEXT( ImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImportMemoryHostPointerInfoEXT( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL ) - offsetof( PerformanceConfigurationAcquireInfoINTEL, pNext ) );
- return *this;
+ *this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PerformanceConfigurationAcquireInfoINTEL( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+ ImportMemoryHostPointerInfoEXT & operator=( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT const *>( &rhs );
+ return *this;
}
- PerformanceConfigurationAcquireInfoINTEL& operator=( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+ ImportMemoryHostPointerInfoEXT & operator=( ImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ImportMemoryHostPointerInfoEXT ) );
return *this;
}
- PerformanceConfigurationAcquireInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ ImportMemoryHostPointerInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- PerformanceConfigurationAcquireInfoINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT
+ ImportMemoryHostPointerInfoEXT & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
- type = type_;
+ handleType = handleType_;
return *this;
}
- operator VkPerformanceConfigurationAcquireInfoINTEL const&() const VULKAN_HPP_NOEXCEPT
+ ImportMemoryHostPointerInfoEXT & setPHostPointer( void* pHostPointer_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL*>( this );
+ pHostPointer = pHostPointer_;
+ return *this;
}
- operator VkPerformanceConfigurationAcquireInfoINTEL &() VULKAN_HPP_NOEXCEPT
+
+ operator VkImportMemoryHostPointerInfoEXT const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPerformanceConfigurationAcquireInfoINTEL*>( this );
+ return *reinterpret_cast<const VkImportMemoryHostPointerInfoEXT*>( this );
}
- bool operator==( PerformanceConfigurationAcquireInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+ operator VkImportMemoryHostPointerInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkImportMemoryHostPointerInfoEXT*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ImportMemoryHostPointerInfoEXT const& ) const = default;
+#else
+ bool operator==( ImportMemoryHostPointerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( type == rhs.type );
+ && ( handleType == rhs.handleType )
+ && ( pHostPointer == rhs.pHostPointer );
}
- bool operator!=( PerformanceConfigurationAcquireInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( ImportMemoryHostPointerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceConfigurationAcquireInfoINTEL;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryHostPointerInfoEXT;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated;
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
+ void* pHostPointer = {};
+
};
- static_assert( sizeof( PerformanceConfigurationAcquireInfoINTEL ) == sizeof( VkPerformanceConfigurationAcquireInfoINTEL ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PerformanceConfigurationAcquireInfoINTEL>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( ImportMemoryHostPointerInfoEXT ) == sizeof( VkImportMemoryHostPointerInfoEXT ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ImportMemoryHostPointerInfoEXT>::value, "struct wrapper is not a standard layout!" );
- struct PerformanceCounterDescriptionKHR
+ template <>
+ struct CppType<StructureType, StructureType::eImportMemoryHostPointerInfoEXT>
+ {
+ using Type = ImportMemoryHostPointerInfoEXT;
+ };
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ struct ImportMemoryWin32HandleInfoKHR
{
- PerformanceCounterDescriptionKHR( VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags_ = {},
- std::array<char,VK_MAX_DESCRIPTION_SIZE> const& name_ = {},
- std::array<char,VK_MAX_DESCRIPTION_SIZE> const& category_ = {},
- std::array<char,VK_MAX_DESCRIPTION_SIZE> const& description_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , name{}
- , category{}
- , description{}
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryWin32HandleInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoKHR(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, HANDLE handle_ = {}, LPCWSTR name_ = {}) VULKAN_HPP_NOEXCEPT
+ : handleType( handleType_ ), handle( handle_ ), name( name_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoKHR( ImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImportMemoryWin32HandleInfoKHR( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_DESCRIPTION_SIZE,VK_MAX_DESCRIPTION_SIZE>::copy( name, name_ );
- VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_DESCRIPTION_SIZE,VK_MAX_DESCRIPTION_SIZE>::copy( category, category_ );
- VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_DESCRIPTION_SIZE,VK_MAX_DESCRIPTION_SIZE>::copy( description, description_ );
+ *this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR & operator=( VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ ImportMemoryWin32HandleInfoKHR & operator=( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR ) - offsetof( PerformanceCounterDescriptionKHR, pNext ) );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR const *>( &rhs );
return *this;
}
- PerformanceCounterDescriptionKHR( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ ImportMemoryWin32HandleInfoKHR & operator=( ImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ImportMemoryWin32HandleInfoKHR ) );
+ return *this;
}
- PerformanceCounterDescriptionKHR& operator=( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ ImportMemoryWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR const *>(&rhs);
+ pNext = pNext_;
return *this;
}
- operator VkPerformanceCounterDescriptionKHR const&() const VULKAN_HPP_NOEXCEPT
+ ImportMemoryWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPerformanceCounterDescriptionKHR*>( this );
+ handleType = handleType_;
+ return *this;
}
- operator VkPerformanceCounterDescriptionKHR &() VULKAN_HPP_NOEXCEPT
+ ImportMemoryWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPerformanceCounterDescriptionKHR*>( this );
+ handle = handle_;
+ return *this;
}
- bool operator==( PerformanceCounterDescriptionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ ImportMemoryWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
+ {
+ name = name_;
+ return *this;
+ }
+
+
+ operator VkImportMemoryWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkImportMemoryWin32HandleInfoKHR*>( this );
+ }
+
+ operator VkImportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkImportMemoryWin32HandleInfoKHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ImportMemoryWin32HandleInfoKHR const& ) const = default;
+#else
+ bool operator==( ImportMemoryWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( flags == rhs.flags )
- && ( memcmp( name, rhs.name, VK_MAX_DESCRIPTION_SIZE * sizeof( char ) ) == 0 )
- && ( memcmp( category, rhs.category, VK_MAX_DESCRIPTION_SIZE * sizeof( char ) ) == 0 )
- && ( memcmp( description, rhs.description, VK_MAX_DESCRIPTION_SIZE * sizeof( char ) ) == 0 );
+ && ( handleType == rhs.handleType )
+ && ( handle == rhs.handle )
+ && ( name == rhs.name );
}
- bool operator!=( PerformanceCounterDescriptionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( ImportMemoryWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterDescriptionKHR;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryWin32HandleInfoKHR;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags = {};
- char name[VK_MAX_DESCRIPTION_SIZE] = {};
- char category[VK_MAX_DESCRIPTION_SIZE] = {};
- char description[VK_MAX_DESCRIPTION_SIZE] = {};
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
+ HANDLE handle = {};
+ LPCWSTR name = {};
+
};
- static_assert( sizeof( PerformanceCounterDescriptionKHR ) == sizeof( VkPerformanceCounterDescriptionKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PerformanceCounterDescriptionKHR>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( ImportMemoryWin32HandleInfoKHR ) == sizeof( VkImportMemoryWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ImportMemoryWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
- struct PerformanceCounterKHR
+ template <>
+ struct CppType<StructureType, StructureType::eImportMemoryWin32HandleInfoKHR>
{
- PerformanceCounterKHR( VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR unit_ = VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR::eGeneric,
- VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR scope_ = VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR::eCommandBuffer,
- VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage_ = VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32,
- std::array<uint8_t,VK_UUID_SIZE> const& uuid_ = {} ) VULKAN_HPP_NOEXCEPT
- : unit( unit_ )
- , scope( scope_ )
- , storage( storage_ )
- , uuid{}
+ using Type = ImportMemoryWin32HandleInfoKHR;
+ };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ struct ImportMemoryWin32HandleInfoNV
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryWin32HandleInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoNV(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ = {}, HANDLE handle_ = {}) VULKAN_HPP_NOEXCEPT
+ : handleType( handleType_ ), handle( handle_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoNV( ImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImportMemoryWin32HandleInfoNV( VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint8_t,VK_UUID_SIZE,VK_UUID_SIZE>::copy( uuid, uuid_ );
+ *this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_NAMESPACE::PerformanceCounterKHR & operator=( VULKAN_HPP_NAMESPACE::PerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ ImportMemoryWin32HandleInfoNV & operator=( VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterKHR ) - offsetof( PerformanceCounterKHR, pNext ) );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV const *>( &rhs );
return *this;
}
- PerformanceCounterKHR( VkPerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ ImportMemoryWin32HandleInfoNV & operator=( ImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ImportMemoryWin32HandleInfoNV ) );
+ return *this;
}
- PerformanceCounterKHR& operator=( VkPerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ ImportMemoryWin32HandleInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR const *>(&rhs);
+ pNext = pNext_;
return *this;
}
- operator VkPerformanceCounterKHR const&() const VULKAN_HPP_NOEXCEPT
+ ImportMemoryWin32HandleInfoNV & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPerformanceCounterKHR*>( this );
+ handleType = handleType_;
+ return *this;
}
- operator VkPerformanceCounterKHR &() VULKAN_HPP_NOEXCEPT
+ ImportMemoryWin32HandleInfoNV & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPerformanceCounterKHR*>( this );
+ handle = handle_;
+ return *this;
}
- bool operator==( PerformanceCounterKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+ operator VkImportMemoryWin32HandleInfoNV const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkImportMemoryWin32HandleInfoNV*>( this );
+ }
+
+ operator VkImportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkImportMemoryWin32HandleInfoNV*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ImportMemoryWin32HandleInfoNV const& ) const = default;
+#else
+ bool operator==( ImportMemoryWin32HandleInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( unit == rhs.unit )
- && ( scope == rhs.scope )
- && ( storage == rhs.storage )
- && ( memcmp( uuid, rhs.uuid, VK_UUID_SIZE * sizeof( uint8_t ) ) == 0 );
+ && ( handleType == rhs.handleType )
+ && ( handle == rhs.handle );
}
- bool operator!=( PerformanceCounterKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( ImportMemoryWin32HandleInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterKHR;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryWin32HandleInfoNV;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR unit = VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR::eGeneric;
- VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR scope = VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR::eCommandBuffer;
- VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage = VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32;
- uint8_t uuid[VK_UUID_SIZE] = {};
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType = {};
+ HANDLE handle = {};
+
};
- static_assert( sizeof( PerformanceCounterKHR ) == sizeof( VkPerformanceCounterKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PerformanceCounterKHR>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( ImportMemoryWin32HandleInfoNV ) == sizeof( VkImportMemoryWin32HandleInfoNV ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ImportMemoryWin32HandleInfoNV>::value, "struct wrapper is not a standard layout!" );
- union PerformanceCounterResultKHR
+ template <>
+ struct CppType<StructureType, StructureType::eImportMemoryWin32HandleInfoNV>
{
- PerformanceCounterResultKHR( int32_t int32_ = {} )
+ using Type = ImportMemoryWin32HandleInfoNV;
+ };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+ struct InputAttachmentAspectReference
+ {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR InputAttachmentAspectReference(uint32_t subpass_ = {}, uint32_t inputAttachmentIndex_ = {}, VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}) VULKAN_HPP_NOEXCEPT
+ : subpass( subpass_ ), inputAttachmentIndex( inputAttachmentIndex_ ), aspectMask( aspectMask_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR InputAttachmentAspectReference( InputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ InputAttachmentAspectReference( VkInputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT
{
- int32 = int32_;
+ *this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PerformanceCounterResultKHR( int64_t int64_ )
+ InputAttachmentAspectReference & operator=( VkInputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT
{
- int64 = int64_;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference const *>( &rhs );
+ return *this;
}
- PerformanceCounterResultKHR( uint32_t uint32_ )
+ InputAttachmentAspectReference & operator=( InputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT
{
- uint32 = uint32_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( InputAttachmentAspectReference ) );
+ return *this;
}
- PerformanceCounterResultKHR( uint64_t uint64_ )
+ InputAttachmentAspectReference & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT
{
- uint64 = uint64_;
+ subpass = subpass_;
+ return *this;
}
- PerformanceCounterResultKHR( float float32_ )
+ InputAttachmentAspectReference & setInputAttachmentIndex( uint32_t inputAttachmentIndex_ ) VULKAN_HPP_NOEXCEPT
{
- float32 = float32_;
+ inputAttachmentIndex = inputAttachmentIndex_;
+ return *this;
}
- PerformanceCounterResultKHR( double float64_ )
+ InputAttachmentAspectReference & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
{
- float64 = float64_;
+ aspectMask = aspectMask_;
+ return *this;
}
- PerformanceCounterResultKHR & setInt32( int32_t int32_ ) VULKAN_HPP_NOEXCEPT
+
+ operator VkInputAttachmentAspectReference const&() const VULKAN_HPP_NOEXCEPT
{
- int32 = int32_;
+ return *reinterpret_cast<const VkInputAttachmentAspectReference*>( this );
+ }
+
+ operator VkInputAttachmentAspectReference &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkInputAttachmentAspectReference*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( InputAttachmentAspectReference const& ) const = default;
+#else
+ bool operator==( InputAttachmentAspectReference const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( subpass == rhs.subpass )
+ && ( inputAttachmentIndex == rhs.inputAttachmentIndex )
+ && ( aspectMask == rhs.aspectMask );
+ }
+
+ bool operator!=( InputAttachmentAspectReference const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ uint32_t subpass = {};
+ uint32_t inputAttachmentIndex = {};
+ VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
+
+ };
+ static_assert( sizeof( InputAttachmentAspectReference ) == sizeof( VkInputAttachmentAspectReference ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<InputAttachmentAspectReference>::value, "struct wrapper is not a standard layout!" );
+ using InputAttachmentAspectReferenceKHR = InputAttachmentAspectReference;
+
+ struct InstanceCreateInfo
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eInstanceCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR InstanceCreateInfo(VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_ = {}, const VULKAN_HPP_NAMESPACE::ApplicationInfo* pApplicationInfo_ = {}, uint32_t enabledLayerCount_ = {}, const char* const * ppEnabledLayerNames_ = {}, uint32_t enabledExtensionCount_ = {}, const char* const * ppEnabledExtensionNames_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), pApplicationInfo( pApplicationInfo_ ), enabledLayerCount( enabledLayerCount_ ), ppEnabledLayerNames( ppEnabledLayerNames_ ), enabledExtensionCount( enabledExtensionCount_ ), ppEnabledExtensionNames( ppEnabledExtensionNames_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR InstanceCreateInfo( InstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ InstanceCreateInfo( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ InstanceCreateInfo( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_, const VULKAN_HPP_NAMESPACE::ApplicationInfo* pApplicationInfo_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char* const > const & pEnabledLayerNames_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char* const > const & pEnabledExtensionNames_ = {} )
+ : flags( flags_ ), pApplicationInfo( pApplicationInfo_ ), enabledLayerCount( static_cast<uint32_t>( pEnabledLayerNames_.size() ) ), ppEnabledLayerNames( pEnabledLayerNames_.data() ), enabledExtensionCount( static_cast<uint32_t>( pEnabledExtensionNames_.size() ) ), ppEnabledExtensionNames( pEnabledExtensionNames_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ InstanceCreateInfo & operator=( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::InstanceCreateInfo const *>( &rhs );
return *this;
}
- PerformanceCounterResultKHR & setInt64( int64_t int64_ ) VULKAN_HPP_NOEXCEPT
+ InstanceCreateInfo & operator=( InstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- int64 = int64_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( InstanceCreateInfo ) );
return *this;
}
- PerformanceCounterResultKHR & setUint32( uint32_t uint32_ ) VULKAN_HPP_NOEXCEPT
+ InstanceCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- uint32 = uint32_;
+ pNext = pNext_;
return *this;
}
- PerformanceCounterResultKHR & setUint64( uint64_t uint64_ ) VULKAN_HPP_NOEXCEPT
+ InstanceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
- uint64 = uint64_;
+ flags = flags_;
return *this;
}
- PerformanceCounterResultKHR & setFloat32( float float32_ ) VULKAN_HPP_NOEXCEPT
+ InstanceCreateInfo & setPApplicationInfo( const VULKAN_HPP_NAMESPACE::ApplicationInfo* pApplicationInfo_ ) VULKAN_HPP_NOEXCEPT
{
- float32 = float32_;
+ pApplicationInfo = pApplicationInfo_;
return *this;
}
- PerformanceCounterResultKHR & setFloat64( double float64_ ) VULKAN_HPP_NOEXCEPT
+ InstanceCreateInfo & setEnabledLayerCount( uint32_t enabledLayerCount_ ) VULKAN_HPP_NOEXCEPT
{
- float64 = float64_;
+ enabledLayerCount = enabledLayerCount_;
return *this;
}
- VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR & operator=( VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ InstanceCreateInfo & setPpEnabledLayerNames( const char* const * ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
{
- memcpy( this, &rhs, sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR ) );
+ ppEnabledLayerNames = ppEnabledLayerNames_;
return *this;
}
- operator VkPerformanceCounterResultKHR const&() const
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ InstanceCreateInfo & setPEnabledLayerNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char* const > const & pEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPerformanceCounterResultKHR*>(this);
+ enabledLayerCount = static_cast<uint32_t>( pEnabledLayerNames_.size() );
+ ppEnabledLayerNames = pEnabledLayerNames_.data();
+ return *this;
}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
- operator VkPerformanceCounterResultKHR &()
+ InstanceCreateInfo & setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPerformanceCounterResultKHR*>(this);
+ enabledExtensionCount = enabledExtensionCount_;
+ return *this;
}
- int32_t int32;
- int64_t int64;
- uint32_t uint32;
- uint64_t uint64;
- float float32;
- double float64;
+ InstanceCreateInfo & setPpEnabledExtensionNames( const char* const * ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
+ {
+ ppEnabledExtensionNames = ppEnabledExtensionNames_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ InstanceCreateInfo & setPEnabledExtensionNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char* const > const & pEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
+ {
+ enabledExtensionCount = static_cast<uint32_t>( pEnabledExtensionNames_.size() );
+ ppEnabledExtensionNames = pEnabledExtensionNames_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+ operator VkInstanceCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkInstanceCreateInfo*>( this );
+ }
+
+ operator VkInstanceCreateInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkInstanceCreateInfo*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( InstanceCreateInfo const& ) const = default;
+#else
+ bool operator==( InstanceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( pApplicationInfo == rhs.pApplicationInfo )
+ && ( enabledLayerCount == rhs.enabledLayerCount )
+ && ( ppEnabledLayerNames == rhs.ppEnabledLayerNames )
+ && ( enabledExtensionCount == rhs.enabledExtensionCount )
+ && ( ppEnabledExtensionNames == rhs.ppEnabledExtensionNames );
+ }
+
+ bool operator!=( InstanceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eInstanceCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags = {};
+ const VULKAN_HPP_NAMESPACE::ApplicationInfo* pApplicationInfo = {};
+ uint32_t enabledLayerCount = {};
+ const char* const * ppEnabledLayerNames = {};
+ uint32_t enabledExtensionCount = {};
+ const char* const * ppEnabledExtensionNames = {};
+
};
+ static_assert( sizeof( InstanceCreateInfo ) == sizeof( VkInstanceCreateInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<InstanceCreateInfo>::value, "struct wrapper is not a standard layout!" );
- struct PerformanceMarkerInfoINTEL
+ template <>
+ struct CppType<StructureType, StructureType::eInstanceCreateInfo>
+ {
+ using Type = InstanceCreateInfo;
+ };
+
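// --- Illustrative usage sketch (annotation, not part of the diff above) ---
// The enhanced-mode InstanceCreateInfo constructor added above takes ArrayProxyNoTemporaries
// spans, so layer and extension lists can be passed straight from named std::vectors instead
// of count/pointer pairs (temporaries are intentionally rejected). A minimal sketch, assuming
// <vulkan/vulkan.hpp>, <vector>, and the default "vk" namespace:
std::vector<char const *> layers     = { "VK_LAYER_KHRONOS_validation" };
std::vector<char const *> extensions = { VK_KHR_SURFACE_EXTENSION_NAME };
vk::ApplicationInfo    appInfo( "demo", 1, "no-engine", 1, VK_API_VERSION_1_1 );
vk::InstanceCreateInfo createInfo( {}, &appInfo, layers, extensions );
vk::Instance instance = vk::createInstance( createInfo );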
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+ struct MacOSSurfaceCreateInfoMVK
{
- VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL( uint64_t marker_ = {} ) VULKAN_HPP_NOEXCEPT
- : marker( marker_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMacosSurfaceCreateInfoMVK;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR MacOSSurfaceCreateInfoMVK(VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags_ = {}, const void* pView_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), pView( pView_ )
{}
- VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL & operator=( VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR MacOSSurfaceCreateInfoMVK( MacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ MacOSSurfaceCreateInfoMVK( VkMacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL ) - offsetof( PerformanceMarkerInfoINTEL, pNext ) );
- return *this;
+ *this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PerformanceMarkerInfoINTEL( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+ MacOSSurfaceCreateInfoMVK & operator=( VkMacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const *>( &rhs );
+ return *this;
}
- PerformanceMarkerInfoINTEL& operator=( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+ MacOSSurfaceCreateInfoMVK & operator=( MacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( MacOSSurfaceCreateInfoMVK ) );
return *this;
}
- PerformanceMarkerInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ MacOSSurfaceCreateInfoMVK & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- PerformanceMarkerInfoINTEL & setMarker( uint64_t marker_ ) VULKAN_HPP_NOEXCEPT
+ MacOSSurfaceCreateInfoMVK & setFlags( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags_ ) VULKAN_HPP_NOEXCEPT
{
- marker = marker_;
+ flags = flags_;
return *this;
}
- operator VkPerformanceMarkerInfoINTEL const&() const VULKAN_HPP_NOEXCEPT
+ MacOSSurfaceCreateInfoMVK & setPView( const void* pView_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPerformanceMarkerInfoINTEL*>( this );
+ pView = pView_;
+ return *this;
}
- operator VkPerformanceMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT
+
+ operator VkMacOSSurfaceCreateInfoMVK const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPerformanceMarkerInfoINTEL*>( this );
+ return *reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK*>( this );
}
- bool operator==( PerformanceMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+ operator VkMacOSSurfaceCreateInfoMVK &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkMacOSSurfaceCreateInfoMVK*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( MacOSSurfaceCreateInfoMVK const& ) const = default;
+#else
+ bool operator==( MacOSSurfaceCreateInfoMVK const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( marker == rhs.marker );
+ && ( flags == rhs.flags )
+ && ( pView == rhs.pView );
}
- bool operator!=( PerformanceMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( MacOSSurfaceCreateInfoMVK const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceMarkerInfoINTEL;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMacosSurfaceCreateInfoMVK;
const void* pNext = {};
- uint64_t marker = {};
+ VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags = {};
+ const void* pView = {};
+
};
- static_assert( sizeof( PerformanceMarkerInfoINTEL ) == sizeof( VkPerformanceMarkerInfoINTEL ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PerformanceMarkerInfoINTEL>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( MacOSSurfaceCreateInfoMVK ) == sizeof( VkMacOSSurfaceCreateInfoMVK ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<MacOSSurfaceCreateInfoMVK>::value, "struct wrapper is not a standard layout!" );
- struct PerformanceOverrideInfoINTEL
+ template <>
+ struct CppType<StructureType, StructureType::eMacosSurfaceCreateInfoMVK>
{
- VULKAN_HPP_CONSTEXPR PerformanceOverrideInfoINTEL( VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware,
- VULKAN_HPP_NAMESPACE::Bool32 enable_ = {},
- uint64_t parameter_ = {} ) VULKAN_HPP_NOEXCEPT
- : type( type_ )
- , enable( enable_ )
- , parameter( parameter_ )
+ using Type = MacOSSurfaceCreateInfoMVK;
+ };
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+ struct MemoryAllocateFlagsInfo
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryAllocateFlagsInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR MemoryAllocateFlagsInfo(VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags_ = {}, uint32_t deviceMask_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), deviceMask( deviceMask_ )
{}
- VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL & operator=( VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR MemoryAllocateFlagsInfo( MemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ MemoryAllocateFlagsInfo( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ MemoryAllocateFlagsInfo & operator=( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL ) - offsetof( PerformanceOverrideInfoINTEL, pNext ) );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo const *>( &rhs );
return *this;
}
- PerformanceOverrideInfoINTEL( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+ MemoryAllocateFlagsInfo & operator=( MemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( MemoryAllocateFlagsInfo ) );
+ return *this;
+ }
+
+ MemoryAllocateFlagsInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ MemoryAllocateFlagsInfo & setFlags( VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ MemoryAllocateFlagsInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
+ {
+ deviceMask = deviceMask_;
+ return *this;
+ }
+
+
+ operator VkMemoryAllocateFlagsInfo const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkMemoryAllocateFlagsInfo*>( this );
+ }
+
+ operator VkMemoryAllocateFlagsInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkMemoryAllocateFlagsInfo*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( MemoryAllocateFlagsInfo const& ) const = default;
+#else
+ bool operator==( MemoryAllocateFlagsInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( deviceMask == rhs.deviceMask );
+ }
+
+ bool operator!=( MemoryAllocateFlagsInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryAllocateFlagsInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags = {};
+ uint32_t deviceMask = {};
+
+ };
+ static_assert( sizeof( MemoryAllocateFlagsInfo ) == sizeof( VkMemoryAllocateFlagsInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<MemoryAllocateFlagsInfo>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eMemoryAllocateFlagsInfo>
+ {
+ using Type = MemoryAllocateFlagsInfo;
+ };
+ using MemoryAllocateFlagsInfoKHR = MemoryAllocateFlagsInfo;
+
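// --- Illustrative usage sketch (annotation, not part of the diff above) ---
// A minimal sketch of opting an allocation into buffer device addresses via the
// MemoryAllocateFlagsInfo wrapper added above; "allocationSize" and "memoryTypeIndex"
// are assumed to come from the surrounding code.
vk::MemoryAllocateFlagsInfo flagsInfo( vk::MemoryAllocateFlagBits::eDeviceAddress );
vk::MemoryAllocateInfo      allocInfo( allocationSize, memoryTypeIndex );
allocInfo.setPNext( &flagsInfo );   // required before using vkGetBufferDeviceAddress on the result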
+ struct MemoryDedicatedAllocateInfo
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryDedicatedAllocateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfo(VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}) VULKAN_HPP_NOEXCEPT
+ : image( image_ ), buffer( buffer_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfo( MemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ MemoryDedicatedAllocateInfo( VkMemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PerformanceOverrideInfoINTEL& operator=( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+ MemoryDedicatedAllocateInfo & operator=( VkMemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo const *>( &rhs );
return *this;
}
- PerformanceOverrideInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ MemoryDedicatedAllocateInfo & operator=( MemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( MemoryDedicatedAllocateInfo ) );
return *this;
}
- PerformanceOverrideInfoINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT
+ MemoryDedicatedAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- type = type_;
+ pNext = pNext_;
return *this;
}
- PerformanceOverrideInfoINTEL & setEnable( VULKAN_HPP_NAMESPACE::Bool32 enable_ ) VULKAN_HPP_NOEXCEPT
+ MemoryDedicatedAllocateInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
{
- enable = enable_;
+ image = image_;
return *this;
}
- PerformanceOverrideInfoINTEL & setParameter( uint64_t parameter_ ) VULKAN_HPP_NOEXCEPT
+ MemoryDedicatedAllocateInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
{
- parameter = parameter_;
+ buffer = buffer_;
return *this;
}
- operator VkPerformanceOverrideInfoINTEL const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkMemoryDedicatedAllocateInfo const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPerformanceOverrideInfoINTEL*>( this );
+ return *reinterpret_cast<const VkMemoryDedicatedAllocateInfo*>( this );
}
- operator VkPerformanceOverrideInfoINTEL &() VULKAN_HPP_NOEXCEPT
+ operator VkMemoryDedicatedAllocateInfo &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPerformanceOverrideInfoINTEL*>( this );
+ return *reinterpret_cast<VkMemoryDedicatedAllocateInfo*>( this );
}
- bool operator==( PerformanceOverrideInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( MemoryDedicatedAllocateInfo const& ) const = default;
+#else
+ bool operator==( MemoryDedicatedAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( type == rhs.type )
- && ( enable == rhs.enable )
- && ( parameter == rhs.parameter );
+ && ( image == rhs.image )
+ && ( buffer == rhs.buffer );
}
- bool operator!=( PerformanceOverrideInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( MemoryDedicatedAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceOverrideInfoINTEL;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryDedicatedAllocateInfo;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware;
- VULKAN_HPP_NAMESPACE::Bool32 enable = {};
- uint64_t parameter = {};
+ VULKAN_HPP_NAMESPACE::Image image = {};
+ VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+
};
- static_assert( sizeof( PerformanceOverrideInfoINTEL ) == sizeof( VkPerformanceOverrideInfoINTEL ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PerformanceOverrideInfoINTEL>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( MemoryDedicatedAllocateInfo ) == sizeof( VkMemoryDedicatedAllocateInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<MemoryDedicatedAllocateInfo>::value, "struct wrapper is not a standard layout!" );
- struct PerformanceQuerySubmitInfoKHR
+ template <>
+ struct CppType<StructureType, StructureType::eMemoryDedicatedAllocateInfo>
+ {
+ using Type = MemoryDedicatedAllocateInfo;
+ };
+ using MemoryDedicatedAllocateInfoKHR = MemoryDedicatedAllocateInfo;
+
+ struct MemoryDedicatedRequirements
{
- VULKAN_HPP_CONSTEXPR PerformanceQuerySubmitInfoKHR( uint32_t counterPassIndex_ = {} ) VULKAN_HPP_NOEXCEPT
- : counterPassIndex( counterPassIndex_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryDedicatedRequirements;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR MemoryDedicatedRequirements(VULKAN_HPP_NAMESPACE::Bool32 prefersDedicatedAllocation_ = {}, VULKAN_HPP_NAMESPACE::Bool32 requiresDedicatedAllocation_ = {}) VULKAN_HPP_NOEXCEPT
+ : prefersDedicatedAllocation( prefersDedicatedAllocation_ ), requiresDedicatedAllocation( requiresDedicatedAllocation_ )
{}
- VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR & operator=( VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR MemoryDedicatedRequirements( MemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ MemoryDedicatedRequirements( VkMemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR ) - offsetof( PerformanceQuerySubmitInfoKHR, pNext ) );
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ MemoryDedicatedRequirements & operator=( VkMemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements const *>( &rhs );
return *this;
}
- PerformanceQuerySubmitInfoKHR( VkPerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ MemoryDedicatedRequirements & operator=( MemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( MemoryDedicatedRequirements ) );
+ return *this;
+ }
+
+
+ operator VkMemoryDedicatedRequirements const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkMemoryDedicatedRequirements*>( this );
+ }
+
+ operator VkMemoryDedicatedRequirements &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkMemoryDedicatedRequirements*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( MemoryDedicatedRequirements const& ) const = default;
+#else
+ bool operator==( MemoryDedicatedRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( prefersDedicatedAllocation == rhs.prefersDedicatedAllocation )
+ && ( requiresDedicatedAllocation == rhs.requiresDedicatedAllocation );
+ }
+
+ bool operator!=( MemoryDedicatedRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryDedicatedRequirements;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 prefersDedicatedAllocation = {};
+ VULKAN_HPP_NAMESPACE::Bool32 requiresDedicatedAllocation = {};
+
+ };
+ static_assert( sizeof( MemoryDedicatedRequirements ) == sizeof( VkMemoryDedicatedRequirements ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<MemoryDedicatedRequirements>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eMemoryDedicatedRequirements>
+ {
+ using Type = MemoryDedicatedRequirements;
+ };
+ using MemoryDedicatedRequirementsKHR = MemoryDedicatedRequirements;
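A minimal usage sketch (not taken from this diff), assuming an existing vk::Device named device and a vk::Buffer named buffer: with the new wrapper, dedicated-allocation requirements are typically queried by chaining MemoryDedicatedRequirements behind MemoryRequirements2.

    // Sketch only: 'device' and 'buffer' are assumed, valid handles.
    vk::StructureChain<vk::MemoryRequirements2, vk::MemoryDedicatedRequirements> reqChain =
        device.getBufferMemoryRequirements2<vk::MemoryRequirements2, vk::MemoryDedicatedRequirements>(
            vk::BufferMemoryRequirementsInfo2( buffer ) );
    const auto & dedicated = reqChain.get<vk::MemoryDedicatedRequirements>();
    if ( dedicated.requiresDedicatedAllocation || dedicated.prefersDedicatedAllocation )
    {
        // Allocate a dedicated block and reference 'buffer' through MemoryDedicatedAllocateInfo.
    }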
+
+ struct MemoryOpaqueCaptureAddressAllocateInfo
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryOpaqueCaptureAddressAllocateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR MemoryOpaqueCaptureAddressAllocateInfo(uint64_t opaqueCaptureAddress_ = {}) VULKAN_HPP_NOEXCEPT
+ : opaqueCaptureAddress( opaqueCaptureAddress_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR MemoryOpaqueCaptureAddressAllocateInfo( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ MemoryOpaqueCaptureAddressAllocateInfo( VkMemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PerformanceQuerySubmitInfoKHR& operator=( VkPerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ MemoryOpaqueCaptureAddressAllocateInfo & operator=( VkMemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo const *>( &rhs );
return *this;
}
- PerformanceQuerySubmitInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ MemoryOpaqueCaptureAddressAllocateInfo & operator=( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( MemoryOpaqueCaptureAddressAllocateInfo ) );
+ return *this;
+ }
+
+ MemoryOpaqueCaptureAddressAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- PerformanceQuerySubmitInfoKHR & setCounterPassIndex( uint32_t counterPassIndex_ ) VULKAN_HPP_NOEXCEPT
+ MemoryOpaqueCaptureAddressAllocateInfo & setOpaqueCaptureAddress( uint64_t opaqueCaptureAddress_ ) VULKAN_HPP_NOEXCEPT
{
- counterPassIndex = counterPassIndex_;
+ opaqueCaptureAddress = opaqueCaptureAddress_;
return *this;
}
- operator VkPerformanceQuerySubmitInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkMemoryOpaqueCaptureAddressAllocateInfo const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPerformanceQuerySubmitInfoKHR*>( this );
+ return *reinterpret_cast<const VkMemoryOpaqueCaptureAddressAllocateInfo*>( this );
}
- operator VkPerformanceQuerySubmitInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkMemoryOpaqueCaptureAddressAllocateInfo &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPerformanceQuerySubmitInfoKHR*>( this );
+ return *reinterpret_cast<VkMemoryOpaqueCaptureAddressAllocateInfo*>( this );
}
- bool operator==( PerformanceQuerySubmitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( MemoryOpaqueCaptureAddressAllocateInfo const& ) const = default;
+#else
+ bool operator==( MemoryOpaqueCaptureAddressAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( counterPassIndex == rhs.counterPassIndex );
+ && ( opaqueCaptureAddress == rhs.opaqueCaptureAddress );
}
- bool operator!=( PerformanceQuerySubmitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( MemoryOpaqueCaptureAddressAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceQuerySubmitInfoKHR;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryOpaqueCaptureAddressAllocateInfo;
const void* pNext = {};
- uint32_t counterPassIndex = {};
+ uint64_t opaqueCaptureAddress = {};
+
};
- static_assert( sizeof( PerformanceQuerySubmitInfoKHR ) == sizeof( VkPerformanceQuerySubmitInfoKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PerformanceQuerySubmitInfoKHR>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( MemoryOpaqueCaptureAddressAllocateInfo ) == sizeof( VkMemoryOpaqueCaptureAddressAllocateInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<MemoryOpaqueCaptureAddressAllocateInfo>::value, "struct wrapper is not a standard layout!" );
- struct PerformanceStreamMarkerInfoINTEL
+ template <>
+ struct CppType<StructureType, StructureType::eMemoryOpaqueCaptureAddressAllocateInfo>
+ {
+ using Type = MemoryOpaqueCaptureAddressAllocateInfo;
+ };
+ using MemoryOpaqueCaptureAddressAllocateInfoKHR = MemoryOpaqueCaptureAddressAllocateInfo;
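A hedged sketch of how this structure is commonly used, assuming a device created with the buffer-device-address capture/replay feature, placeholder values allocationSize and memoryTypeIndex, and a previously recorded address capturedAddress: during replay the captured opaque address is chained onto the allocation.

    // Sketch only: replays an allocation at an address recorded during capture.
    vk::StructureChain<vk::MemoryAllocateInfo, vk::MemoryAllocateFlagsInfo, vk::MemoryOpaqueCaptureAddressAllocateInfo> allocChain(
        vk::MemoryAllocateInfo( allocationSize, memoryTypeIndex ),
        vk::MemoryAllocateFlagsInfo( vk::MemoryAllocateFlagBits::eDeviceAddress | vk::MemoryAllocateFlagBits::eDeviceAddressCaptureReplay ),
        vk::MemoryOpaqueCaptureAddressAllocateInfo( capturedAddress ) );
    vk::DeviceMemory memory = device.allocateMemory( allocChain.get<vk::MemoryAllocateInfo>() );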
+
+ struct MemoryPriorityAllocateInfoEXT
{
- VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL( uint32_t marker_ = {} ) VULKAN_HPP_NOEXCEPT
- : marker( marker_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryPriorityAllocateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR MemoryPriorityAllocateInfoEXT(float priority_ = {}) VULKAN_HPP_NOEXCEPT
+ : priority( priority_ )
{}
- VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL & operator=( VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR MemoryPriorityAllocateInfoEXT( MemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ MemoryPriorityAllocateInfoEXT( VkMemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL ) - offsetof( PerformanceStreamMarkerInfoINTEL, pNext ) );
- return *this;
+ *this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PerformanceStreamMarkerInfoINTEL( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+ MemoryPriorityAllocateInfoEXT & operator=( VkMemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT const *>( &rhs );
+ return *this;
}
- PerformanceStreamMarkerInfoINTEL& operator=( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+ MemoryPriorityAllocateInfoEXT & operator=( MemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( MemoryPriorityAllocateInfoEXT ) );
return *this;
}
- PerformanceStreamMarkerInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ MemoryPriorityAllocateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- PerformanceStreamMarkerInfoINTEL & setMarker( uint32_t marker_ ) VULKAN_HPP_NOEXCEPT
+ MemoryPriorityAllocateInfoEXT & setPriority( float priority_ ) VULKAN_HPP_NOEXCEPT
{
- marker = marker_;
+ priority = priority_;
return *this;
}
- operator VkPerformanceStreamMarkerInfoINTEL const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkMemoryPriorityAllocateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL*>( this );
+ return *reinterpret_cast<const VkMemoryPriorityAllocateInfoEXT*>( this );
}
- operator VkPerformanceStreamMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT
+ operator VkMemoryPriorityAllocateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPerformanceStreamMarkerInfoINTEL*>( this );
+ return *reinterpret_cast<VkMemoryPriorityAllocateInfoEXT*>( this );
}
- bool operator==( PerformanceStreamMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( MemoryPriorityAllocateInfoEXT const& ) const = default;
+#else
+ bool operator==( MemoryPriorityAllocateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( marker == rhs.marker );
+ && ( priority == rhs.priority );
}
- bool operator!=( PerformanceStreamMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( MemoryPriorityAllocateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceStreamMarkerInfoINTEL;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryPriorityAllocateInfoEXT;
const void* pNext = {};
- uint32_t marker = {};
+ float priority = {};
+
};
- static_assert( sizeof( PerformanceStreamMarkerInfoINTEL ) == sizeof( VkPerformanceStreamMarkerInfoINTEL ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PerformanceStreamMarkerInfoINTEL>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( MemoryPriorityAllocateInfoEXT ) == sizeof( VkMemoryPriorityAllocateInfoEXT ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<MemoryPriorityAllocateInfoEXT>::value, "struct wrapper is not a standard layout!" );
- union PerformanceValueDataINTEL
+ template <>
+ struct CppType<StructureType, StructureType::eMemoryPriorityAllocateInfoEXT>
{
- PerformanceValueDataINTEL( uint32_t value32_ = {} )
+ using Type = MemoryPriorityAllocateInfoEXT;
+ };
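A minimal sketch, assuming VK_EXT_memory_priority is enabled and allocationSize / memoryTypeIndex are placeholders: the priority structure is simply chained onto MemoryAllocateInfo before allocating.

    // Sketch only: 0.0f is the lowest priority, 1.0f the highest.
    vk::StructureChain<vk::MemoryAllocateInfo, vk::MemoryPriorityAllocateInfoEXT> priorityChain(
        vk::MemoryAllocateInfo( allocationSize, memoryTypeIndex ),
        vk::MemoryPriorityAllocateInfoEXT( 1.0f ) );
    vk::DeviceMemory memory = device.allocateMemory( priorityChain.get<vk::MemoryAllocateInfo>() );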
+
+#ifdef VK_USE_PLATFORM_METAL_EXT
+ struct MetalSurfaceCreateInfoEXT
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMetalSurfaceCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR MetalSurfaceCreateInfoEXT(VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_ = {}, const CAMetalLayer* pLayer_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), pLayer( pLayer_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR MetalSurfaceCreateInfoEXT( MetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ MetalSurfaceCreateInfoEXT( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- value32 = value32_;
+ *this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PerformanceValueDataINTEL( uint64_t value64_ )
+ MetalSurfaceCreateInfoEXT & operator=( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- value64 = value64_;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const *>( &rhs );
+ return *this;
}
- PerformanceValueDataINTEL( float valueFloat_ )
+ MetalSurfaceCreateInfoEXT & operator=( MetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- valueFloat = valueFloat_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( MetalSurfaceCreateInfoEXT ) );
+ return *this;
}
- PerformanceValueDataINTEL( const char* valueString_ )
+ MetalSurfaceCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- valueString = valueString_;
+ pNext = pNext_;
+ return *this;
}
- PerformanceValueDataINTEL & setValue32( uint32_t value32_ ) VULKAN_HPP_NOEXCEPT
+ MetalSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
- value32 = value32_;
+ flags = flags_;
return *this;
}
- PerformanceValueDataINTEL & setValue64( uint64_t value64_ ) VULKAN_HPP_NOEXCEPT
+ MetalSurfaceCreateInfoEXT & setPLayer( const CAMetalLayer* pLayer_ ) VULKAN_HPP_NOEXCEPT
{
- value64 = value64_;
+ pLayer = pLayer_;
return *this;
}
- PerformanceValueDataINTEL & setValueFloat( float valueFloat_ ) VULKAN_HPP_NOEXCEPT
+
+ operator VkMetalSurfaceCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
{
- valueFloat = valueFloat_;
+ return *reinterpret_cast<const VkMetalSurfaceCreateInfoEXT*>( this );
+ }
+
+ operator VkMetalSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkMetalSurfaceCreateInfoEXT*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( MetalSurfaceCreateInfoEXT const& ) const = default;
+#else
+ bool operator==( MetalSurfaceCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( pLayer == rhs.pLayer );
+ }
+
+ bool operator!=( MetalSurfaceCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMetalSurfaceCreateInfoEXT;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags = {};
+ const CAMetalLayer* pLayer = {};
+
+ };
+ static_assert( sizeof( MetalSurfaceCreateInfoEXT ) == sizeof( VkMetalSurfaceCreateInfoEXT ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<MetalSurfaceCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eMetalSurfaceCreateInfoEXT>
+ {
+ using Type = MetalSurfaceCreateInfoEXT;
+ };
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
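A hedged, platform-specific sketch, assuming VK_USE_PLATFORM_METAL_EXT is defined, VK_EXT_metal_surface is enabled on the instance, and 'instance' and 'layer' (a CAMetalLayer*) are placeholder handles:

    #ifdef VK_USE_PLATFORM_METAL_EXT
    // Sketch only: wraps an existing CAMetalLayer in a Vulkan surface.
    vk::MetalSurfaceCreateInfoEXT surfaceInfo( {}, layer );
    vk::SurfaceKHR surface = instance.createMetalSurfaceEXT( surfaceInfo );
    #endif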
+
+ union PerformanceCounterResultKHR
+ {
+ PerformanceCounterResultKHR( VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR ) );
+ }
+
+ PerformanceCounterResultKHR( int32_t int32_ = {} )
+ : int32( int32_ )
+ {}
+
+ PerformanceCounterResultKHR( int64_t int64_ )
+ : int64( int64_ )
+ {}
+
+ PerformanceCounterResultKHR( uint32_t uint32_ )
+ : uint32( uint32_ )
+ {}
+
+ PerformanceCounterResultKHR( uint64_t uint64_ )
+ : uint64( uint64_ )
+ {}
+
+ PerformanceCounterResultKHR( float float32_ )
+ : float32( float32_ )
+ {}
+
+ PerformanceCounterResultKHR( double float64_ )
+ : float64( float64_ )
+ {}
+
+ PerformanceCounterResultKHR & setInt32( int32_t int32_ ) VULKAN_HPP_NOEXCEPT
+ {
+ int32 = int32_;
return *this;
}
- PerformanceValueDataINTEL & setValueBool( VULKAN_HPP_NAMESPACE::Bool32 valueBool_ ) VULKAN_HPP_NOEXCEPT
+ PerformanceCounterResultKHR & setInt64( int64_t int64_ ) VULKAN_HPP_NOEXCEPT
{
- valueBool = valueBool_;
+ int64 = int64_;
return *this;
}
- PerformanceValueDataINTEL & setValueString( const char* valueString_ ) VULKAN_HPP_NOEXCEPT
+ PerformanceCounterResultKHR & setUint32( uint32_t uint32_ ) VULKAN_HPP_NOEXCEPT
{
- valueString = valueString_;
+ uint32 = uint32_;
return *this;
}
- VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL & operator=( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+ PerformanceCounterResultKHR & setUint64( uint64_t uint64_ ) VULKAN_HPP_NOEXCEPT
{
- memcpy( this, &rhs, sizeof( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL ) );
+ uint64 = uint64_;
return *this;
}
- operator VkPerformanceValueDataINTEL const&() const
+ PerformanceCounterResultKHR & setFloat32( float float32_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPerformanceValueDataINTEL*>(this);
+ float32 = float32_;
+ return *this;
}
- operator VkPerformanceValueDataINTEL &()
+ PerformanceCounterResultKHR & setFloat64( double float64_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPerformanceValueDataINTEL*>(this);
+ float64 = float64_;
+ return *this;
}
-#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
- uint32_t value32;
- uint64_t value64;
- float valueFloat;
- VULKAN_HPP_NAMESPACE::Bool32 valueBool;
- const char* valueString;
-#else
- uint32_t value32;
- uint64_t value64;
- float valueFloat;
- VkBool32 valueBool;
- const char* valueString;
-#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
+ VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR & operator=( VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR ) );
+ return *this;
+ }
+
+ operator VkPerformanceCounterResultKHR const&() const
+ {
+ return *reinterpret_cast<const VkPerformanceCounterResultKHR*>(this);
+ }
+
+ operator VkPerformanceCounterResultKHR &()
+ {
+ return *reinterpret_cast<VkPerformanceCounterResultKHR*>(this);
+ }
+
+ int32_t int32;
+ int64_t int64;
+ uint32_t uint32;
+ uint64_t uint64;
+ float float32;
+ double float64;
};
- struct PerformanceValueINTEL
+ struct PerformanceQuerySubmitInfoKHR
{
- PerformanceValueINTEL( VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL::eUint32,
- VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data_ = {} ) VULKAN_HPP_NOEXCEPT
- : type( type_ )
- , data( data_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceQuerySubmitInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PerformanceQuerySubmitInfoKHR(uint32_t counterPassIndex_ = {}) VULKAN_HPP_NOEXCEPT
+ : counterPassIndex( counterPassIndex_ )
{}
- PerformanceValueINTEL( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR PerformanceQuerySubmitInfoKHR( PerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PerformanceQuerySubmitInfoKHR( VkPerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PerformanceValueINTEL& operator=( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+ PerformanceQuerySubmitInfoKHR & operator=( VkPerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR const *>( &rhs );
return *this;
}
- PerformanceValueINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT
+ PerformanceQuerySubmitInfoKHR & operator=( PerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- type = type_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PerformanceQuerySubmitInfoKHR ) );
return *this;
}
- PerformanceValueINTEL & setData( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data_ ) VULKAN_HPP_NOEXCEPT
+ PerformanceQuerySubmitInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- data = data_;
+ pNext = pNext_;
return *this;
}
- operator VkPerformanceValueINTEL const&() const VULKAN_HPP_NOEXCEPT
+ PerformanceQuerySubmitInfoKHR & setCounterPassIndex( uint32_t counterPassIndex_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPerformanceValueINTEL*>( this );
+ counterPassIndex = counterPassIndex_;
+ return *this;
}
- operator VkPerformanceValueINTEL &() VULKAN_HPP_NOEXCEPT
+
+ operator VkPerformanceQuerySubmitInfoKHR const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPerformanceValueINTEL*>( this );
+ return *reinterpret_cast<const VkPerformanceQuerySubmitInfoKHR*>( this );
+ }
+
+ operator VkPerformanceQuerySubmitInfoKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPerformanceQuerySubmitInfoKHR*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PerformanceQuerySubmitInfoKHR const& ) const = default;
+#else
+ bool operator==( PerformanceQuerySubmitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( counterPassIndex == rhs.counterPassIndex );
+ }
+
+ bool operator!=( PerformanceQuerySubmitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
public:
- VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL::eUint32;
- VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data = {};
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceQuerySubmitInfoKHR;
+ const void* pNext = {};
+ uint32_t counterPassIndex = {};
+
+ };
+ static_assert( sizeof( PerformanceQuerySubmitInfoKHR ) == sizeof( VkPerformanceQuerySubmitInfoKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PerformanceQuerySubmitInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::ePerformanceQuerySubmitInfoKHR>
+ {
+ using Type = PerformanceQuerySubmitInfoKHR;
};
- static_assert( sizeof( PerformanceValueINTEL ) == sizeof( VkPerformanceValueINTEL ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PerformanceValueINTEL>::value, "struct wrapper is not a standard layout!" );
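A minimal sketch of the VK_KHR_performance_query submit loop, assuming placeholder handles queue and cmdBuffer, and a pass count numPasses obtained beforehand (for example from getQueueFamilyPerformanceQueryPassesKHR): the same work is resubmitted once per counter pass with a different counterPassIndex.

    // Sketch only: each pass reuses the recorded command buffer.
    for ( uint32_t pass = 0; pass < numPasses; ++pass )
    {
        vk::StructureChain<vk::SubmitInfo, vk::PerformanceQuerySubmitInfoKHR> submitChain(
            vk::SubmitInfo( 0, nullptr, nullptr, 1, &cmdBuffer ),
            vk::PerformanceQuerySubmitInfoKHR( pass ) );
        queue.submit( submitChain.get<vk::SubmitInfo>(), vk::Fence() );
        queue.waitIdle();
    }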
struct PhysicalDevice16BitStorageFeatures
{
- VULKAN_HPP_CONSTEXPR PhysicalDevice16BitStorageFeatures( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {} ) VULKAN_HPP_NOEXCEPT
- : storageBuffer16BitAccess( storageBuffer16BitAccess_ )
- , uniformAndStorageBuffer16BitAccess( uniformAndStorageBuffer16BitAccess_ )
- , storagePushConstant16( storagePushConstant16_ )
- , storageInputOutput16( storageInputOutput16_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevice16BitStorageFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDevice16BitStorageFeatures(VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {}) VULKAN_HPP_NOEXCEPT
+ : storageBuffer16BitAccess( storageBuffer16BitAccess_ ), uniformAndStorageBuffer16BitAccess( uniformAndStorageBuffer16BitAccess_ ), storagePushConstant16( storagePushConstant16_ ), storageInputOutput16( storageInputOutput16_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures ) - offsetof( PhysicalDevice16BitStorageFeatures, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDevice16BitStorageFeatures( PhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDevice16BitStorageFeatures( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDevice16BitStorageFeatures & operator=( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures const *>( &rhs );
+ return *this;
+ }
- PhysicalDevice16BitStorageFeatures& operator=( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDevice16BitStorageFeatures & operator=( PhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDevice16BitStorageFeatures ) );
return *this;
}
@@ -41933,6 +60729,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDevice16BitStorageFeatures const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevice16BitStorageFeatures*>( this );
@@ -41943,6 +60740,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDevice16BitStorageFeatures*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDevice16BitStorageFeatures const& ) const = default;
+#else
bool operator==( PhysicalDevice16BitStorageFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -41957,6 +60758,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice16BitStorageFeatures;
@@ -41965,34 +60769,140 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess = {};
VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16 = {};
VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16 = {};
+
};
static_assert( sizeof( PhysicalDevice16BitStorageFeatures ) == sizeof( VkPhysicalDevice16BitStorageFeatures ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDevice16BitStorageFeatures>::value, "struct wrapper is not a standard layout!" );
- struct PhysicalDevice8BitStorageFeatures
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDevice16BitStorageFeatures>
{
- VULKAN_HPP_CONSTEXPR PhysicalDevice8BitStorageFeatures( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {} ) VULKAN_HPP_NOEXCEPT
- : storageBuffer8BitAccess( storageBuffer8BitAccess_ )
- , uniformAndStorageBuffer8BitAccess( uniformAndStorageBuffer8BitAccess_ )
- , storagePushConstant8( storagePushConstant8_ )
+ using Type = PhysicalDevice16BitStorageFeatures;
+ };
+ using PhysicalDevice16BitStorageFeaturesKHR = PhysicalDevice16BitStorageFeatures;
+
+ struct PhysicalDevice4444FormatsFeaturesEXT
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevice4444FormatsFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDevice4444FormatsFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4_ = {}, VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4_ = {}) VULKAN_HPP_NOEXCEPT
+ : formatA4R4G4B4( formatA4R4G4B4_ ), formatA4B4G4R4( formatA4B4G4R4_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR PhysicalDevice4444FormatsFeaturesEXT( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDevice4444FormatsFeaturesEXT( VkPhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDevice4444FormatsFeaturesEXT & operator=( VkPhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures ) - offsetof( PhysicalDevice8BitStorageFeatures, pNext ) );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevice4444FormatsFeaturesEXT const *>( &rhs );
return *this;
}
+ PhysicalDevice4444FormatsFeaturesEXT & operator=( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDevice4444FormatsFeaturesEXT ) );
+ return *this;
+ }
+
+ PhysicalDevice4444FormatsFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PhysicalDevice4444FormatsFeaturesEXT & setFormatA4R4G4B4( VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4_ ) VULKAN_HPP_NOEXCEPT
+ {
+ formatA4R4G4B4 = formatA4R4G4B4_;
+ return *this;
+ }
+
+ PhysicalDevice4444FormatsFeaturesEXT & setFormatA4B4G4R4( VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4_ ) VULKAN_HPP_NOEXCEPT
+ {
+ formatA4B4G4R4 = formatA4B4G4R4_;
+ return *this;
+ }
+
+
+ operator VkPhysicalDevice4444FormatsFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDevice4444FormatsFeaturesEXT*>( this );
+ }
+
+ operator VkPhysicalDevice4444FormatsFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDevice4444FormatsFeaturesEXT*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDevice4444FormatsFeaturesEXT const& ) const = default;
+#else
+ bool operator==( PhysicalDevice4444FormatsFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( formatA4R4G4B4 == rhs.formatA4R4G4B4 )
+ && ( formatA4B4G4R4 == rhs.formatA4B4G4R4 );
+ }
+
+ bool operator!=( PhysicalDevice4444FormatsFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice4444FormatsFeaturesEXT;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4 = {};
+
+ };
+ static_assert( sizeof( PhysicalDevice4444FormatsFeaturesEXT ) == sizeof( VkPhysicalDevice4444FormatsFeaturesEXT ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDevice4444FormatsFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDevice4444FormatsFeaturesEXT>
+ {
+ using Type = PhysicalDevice4444FormatsFeaturesEXT;
+ };
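A minimal sketch, assuming a placeholder vk::PhysicalDevice named physicalDevice: feature structures such as this one are normally queried by chaining them behind PhysicalDeviceFeatures2.

    // Sketch only: query support for the 4444 packed formats.
    auto featureChain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2, vk::PhysicalDevice4444FormatsFeaturesEXT>();
    const auto & formats4444 = featureChain.get<vk::PhysicalDevice4444FormatsFeaturesEXT>();
    if ( formats4444.formatA4R4G4B4 )
    {
        // A4R4G4B4 packed formats may be sampled; enable the feature at device creation to use them.
    }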
+
+ struct PhysicalDevice8BitStorageFeatures
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevice8BitStorageFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDevice8BitStorageFeatures(VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {}) VULKAN_HPP_NOEXCEPT
+ : storageBuffer8BitAccess( storageBuffer8BitAccess_ ), uniformAndStorageBuffer8BitAccess( uniformAndStorageBuffer8BitAccess_ ), storagePushConstant8( storagePushConstant8_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR PhysicalDevice8BitStorageFeatures( PhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
PhysicalDevice8BitStorageFeatures( VkPhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDevice8BitStorageFeatures& operator=( VkPhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDevice8BitStorageFeatures & operator=( VkPhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures const *>( &rhs );
+ return *this;
+ }
+
+ PhysicalDevice8BitStorageFeatures & operator=( PhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDevice8BitStorageFeatures ) );
return *this;
}
@@ -42020,6 +60930,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDevice8BitStorageFeatures const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevice8BitStorageFeatures*>( this );
@@ -42030,6 +60941,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDevice8BitStorageFeatures*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDevice8BitStorageFeatures const& ) const = default;
+#else
bool operator==( PhysicalDevice8BitStorageFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -42043,6 +60958,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice8BitStorageFeatures;
@@ -42050,30 +60968,45 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess = {};
VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess = {};
VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8 = {};
+
};
static_assert( sizeof( PhysicalDevice8BitStorageFeatures ) == sizeof( VkPhysicalDevice8BitStorageFeatures ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDevice8BitStorageFeatures>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDevice8BitStorageFeatures>
+ {
+ using Type = PhysicalDevice8BitStorageFeatures;
+ };
+ using PhysicalDevice8BitStorageFeaturesKHR = PhysicalDevice8BitStorageFeatures;
+
struct PhysicalDeviceASTCDecodeFeaturesEXT
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ = {} ) VULKAN_HPP_NOEXCEPT
- : decodeModeSharedExponent( decodeModeSharedExponent_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ = {}) VULKAN_HPP_NOEXCEPT
+ : decodeModeSharedExponent( decodeModeSharedExponent_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT ) - offsetof( PhysicalDeviceASTCDecodeFeaturesEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceASTCDecodeFeaturesEXT( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceASTCDecodeFeaturesEXT& operator=( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceASTCDecodeFeaturesEXT & operator=( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT const *>( &rhs );
+ return *this;
+ }
+
+ PhysicalDeviceASTCDecodeFeaturesEXT & operator=( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceASTCDecodeFeaturesEXT ) );
return *this;
}
@@ -42089,6 +61022,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceASTCDecodeFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceASTCDecodeFeaturesEXT*>( this );
@@ -42099,6 +61033,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceASTCDecodeFeaturesEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceASTCDecodeFeaturesEXT const& ) const = default;
+#else
bool operator==( PhysicalDeviceASTCDecodeFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -42110,35 +61048,52 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent = {};
+
};
static_assert( sizeof( PhysicalDeviceASTCDecodeFeaturesEXT ) == sizeof( VkPhysicalDeviceASTCDecodeFeaturesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceASTCDecodeFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT>
+ {
+ using Type = PhysicalDeviceASTCDecodeFeaturesEXT;
+ };
+
struct PhysicalDeviceBlendOperationAdvancedFeaturesEXT
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ = {} ) VULKAN_HPP_NOEXCEPT
- : advancedBlendCoherentOperations( advancedBlendCoherentOperations_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ = {}) VULKAN_HPP_NOEXCEPT
+ : advancedBlendCoherentOperations( advancedBlendCoherentOperations_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) - offsetof( PhysicalDeviceBlendOperationAdvancedFeaturesEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedFeaturesEXT( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceBlendOperationAdvancedFeaturesEXT( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceBlendOperationAdvancedFeaturesEXT & operator=( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT const *>( &rhs );
+ return *this;
+ }
- PhysicalDeviceBlendOperationAdvancedFeaturesEXT& operator=( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceBlendOperationAdvancedFeaturesEXT & operator=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) );
return *this;
}
@@ -42154,6 +61109,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT*>( this );
@@ -42164,6 +61120,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const& ) const = default;
+#else
bool operator==( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -42175,48 +61135,56 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations = {};
+
};
static_assert( sizeof( PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) == sizeof( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceBlendOperationAdvancedFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT>
+ {
+ using Type = PhysicalDeviceBlendOperationAdvancedFeaturesEXT;
+ };
+
struct PhysicalDeviceBlendOperationAdvancedPropertiesEXT
{
- PhysicalDeviceBlendOperationAdvancedPropertiesEXT( uint32_t advancedBlendMaxColorAttachments_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 advancedBlendIndependentBlend_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedSrcColor_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedDstColor_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations_ = {} ) VULKAN_HPP_NOEXCEPT
- : advancedBlendMaxColorAttachments( advancedBlendMaxColorAttachments_ )
- , advancedBlendIndependentBlend( advancedBlendIndependentBlend_ )
- , advancedBlendNonPremultipliedSrcColor( advancedBlendNonPremultipliedSrcColor_ )
- , advancedBlendNonPremultipliedDstColor( advancedBlendNonPremultipliedDstColor_ )
- , advancedBlendCorrelatedOverlap( advancedBlendCorrelatedOverlap_ )
- , advancedBlendAllOperations( advancedBlendAllOperations_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedPropertiesEXT(uint32_t advancedBlendMaxColorAttachments_ = {}, VULKAN_HPP_NAMESPACE::Bool32 advancedBlendIndependentBlend_ = {}, VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedSrcColor_ = {}, VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedDstColor_ = {}, VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap_ = {}, VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations_ = {}) VULKAN_HPP_NOEXCEPT
+ : advancedBlendMaxColorAttachments( advancedBlendMaxColorAttachments_ ), advancedBlendIndependentBlend( advancedBlendIndependentBlend_ ), advancedBlendNonPremultipliedSrcColor( advancedBlendNonPremultipliedSrcColor_ ), advancedBlendNonPremultipliedDstColor( advancedBlendNonPremultipliedDstColor_ ), advancedBlendCorrelatedOverlap( advancedBlendCorrelatedOverlap_ ), advancedBlendAllOperations( advancedBlendAllOperations_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT ) - offsetof( PhysicalDeviceBlendOperationAdvancedPropertiesEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedPropertiesEXT( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceBlendOperationAdvancedPropertiesEXT( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceBlendOperationAdvancedPropertiesEXT& operator=( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceBlendOperationAdvancedPropertiesEXT & operator=( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT const *>( &rhs );
return *this;
}
+ PhysicalDeviceBlendOperationAdvancedPropertiesEXT & operator=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceBlendOperationAdvancedPropertiesEXT ) );
+ return *this;
+ }
+
+
operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT*>( this );
@@ -42227,6 +61195,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const& ) const = default;
+#else
bool operator==( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -42243,6 +61215,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT;
@@ -42253,34 +61228,44 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedDstColor = {};
VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap = {};
VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations = {};
+
};
static_assert( sizeof( PhysicalDeviceBlendOperationAdvancedPropertiesEXT ) == sizeof( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceBlendOperationAdvancedPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT>
+ {
+ using Type = PhysicalDeviceBlendOperationAdvancedPropertiesEXT;
+ };
+
struct PhysicalDeviceBufferDeviceAddressFeatures
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeatures( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {} ) VULKAN_HPP_NOEXCEPT
- : bufferDeviceAddress( bufferDeviceAddress_ )
- , bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ )
- , bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBufferDeviceAddressFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeatures(VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {}) VULKAN_HPP_NOEXCEPT
+ : bufferDeviceAddress( bufferDeviceAddress_ ), bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ ), bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures ) - offsetof( PhysicalDeviceBufferDeviceAddressFeatures, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeatures( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceBufferDeviceAddressFeatures( VkPhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceBufferDeviceAddressFeatures & operator=( VkPhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures const *>( &rhs );
+ return *this;
+ }
- PhysicalDeviceBufferDeviceAddressFeatures& operator=( VkPhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceBufferDeviceAddressFeatures & operator=( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceBufferDeviceAddressFeatures ) );
return *this;
}
@@ -42308,6 +61293,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceBufferDeviceAddressFeatures const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceBufferDeviceAddressFeatures*>( this );
@@ -42318,6 +61304,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceBufferDeviceAddressFeatures*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceBufferDeviceAddressFeatures const& ) const = default;
+#else
bool operator==( PhysicalDeviceBufferDeviceAddressFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -42331,6 +61321,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBufferDeviceAddressFeatures;
@@ -42338,34 +61331,45 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {};
VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {};
VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {};
+
};
static_assert( sizeof( PhysicalDeviceBufferDeviceAddressFeatures ) == sizeof( VkPhysicalDeviceBufferDeviceAddressFeatures ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceBufferDeviceAddressFeatures>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceBufferDeviceAddressFeatures>
+ {
+ using Type = PhysicalDeviceBufferDeviceAddressFeatures;
+ };
+ using PhysicalDeviceBufferDeviceAddressFeaturesKHR = PhysicalDeviceBufferDeviceAddressFeatures;
+
struct PhysicalDeviceBufferDeviceAddressFeaturesEXT
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {} ) VULKAN_HPP_NOEXCEPT
- : bufferDeviceAddress( bufferDeviceAddress_ )
- , bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ )
- , bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {}) VULKAN_HPP_NOEXCEPT
+ : bufferDeviceAddress( bufferDeviceAddress_ ), bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ ), bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT ) - offsetof( PhysicalDeviceBufferDeviceAddressFeaturesEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceBufferDeviceAddressFeaturesEXT( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceBufferDeviceAddressFeaturesEXT & operator=( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT const *>( &rhs );
+ return *this;
+ }
- PhysicalDeviceBufferDeviceAddressFeaturesEXT& operator=( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceBufferDeviceAddressFeaturesEXT & operator=( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceBufferDeviceAddressFeaturesEXT ) );
return *this;
}
@@ -42393,6 +61397,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceBufferDeviceAddressFeaturesEXT*>( this );
@@ -42403,6 +61408,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceBufferDeviceAddressFeaturesEXT const& ) const = default;
+#else
bool operator==( PhysicalDeviceBufferDeviceAddressFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -42416,6 +61425,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT;
@@ -42423,30 +61435,45 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {};
VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {};
VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {};
+
};
static_assert( sizeof( PhysicalDeviceBufferDeviceAddressFeaturesEXT ) == sizeof( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceBufferDeviceAddressFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT>
+ {
+ using Type = PhysicalDeviceBufferDeviceAddressFeaturesEXT;
+ };
+ using PhysicalDeviceBufferAddressFeaturesEXT = PhysicalDeviceBufferDeviceAddressFeaturesEXT;
+
struct PhysicalDeviceCoherentMemoryFeaturesAMD
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD( VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory_ = {} ) VULKAN_HPP_NOEXCEPT
- : deviceCoherentMemory( deviceCoherentMemory_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD(VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory_ = {}) VULKAN_HPP_NOEXCEPT
+ : deviceCoherentMemory( deviceCoherentMemory_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD ) - offsetof( PhysicalDeviceCoherentMemoryFeaturesAMD, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceCoherentMemoryFeaturesAMD( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceCoherentMemoryFeaturesAMD& operator=( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceCoherentMemoryFeaturesAMD & operator=( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD const *>( &rhs );
+ return *this;
+ }
+
+ PhysicalDeviceCoherentMemoryFeaturesAMD & operator=( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceCoherentMemoryFeaturesAMD ) );
return *this;
}
@@ -42462,6 +61489,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceCoherentMemoryFeaturesAMD const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceCoherentMemoryFeaturesAMD*>( this );
@@ -42472,6 +61500,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceCoherentMemoryFeaturesAMD*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceCoherentMemoryFeaturesAMD const& ) const = default;
+#else
bool operator==( PhysicalDeviceCoherentMemoryFeaturesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -42483,37 +61515,52 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory = {};
+
};
static_assert( sizeof( PhysicalDeviceCoherentMemoryFeaturesAMD ) == sizeof( VkPhysicalDeviceCoherentMemoryFeaturesAMD ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceCoherentMemoryFeaturesAMD>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD>
+ {
+ using Type = PhysicalDeviceCoherentMemoryFeaturesAMD;
+ };
+
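Each wrapper also gains a CppType specialization that maps its StructureType enumerant back to the corresponding C++ type, presumably so template code elsewhere in the header can recover the wrapper type from an sType value at compile time. A reduced sketch of that trait idea; StructureType, FeaturesA and the primary CppType template are simplified stand-ins defined here only for illustration.

#include <type_traits>

enum class StructureType { eFeaturesA, eFeaturesB };

template <typename EnumType, EnumType value>
struct CppType {};                        // primary template: no mapping by default

struct FeaturesA { unsigned flag = 0; };

template <>
struct CppType<StructureType, StructureType::eFeaturesA>
{
  using Type = FeaturesA;                 // enumerant -> wrapper type
};

static_assert( std::is_same<CppType<StructureType, StructureType::eFeaturesA>::Type, FeaturesA>::value,
               "StructureType::eFeaturesA maps to FeaturesA" );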
struct PhysicalDeviceComputeShaderDerivativesFeaturesNV
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ = {} ) VULKAN_HPP_NOEXCEPT
- : computeDerivativeGroupQuads( computeDerivativeGroupQuads_ )
- , computeDerivativeGroupLinear( computeDerivativeGroupLinear_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_ = {}, VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ = {}) VULKAN_HPP_NOEXCEPT
+ : computeDerivativeGroupQuads( computeDerivativeGroupQuads_ ), computeDerivativeGroupLinear( computeDerivativeGroupLinear_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV ) - offsetof( PhysicalDeviceComputeShaderDerivativesFeaturesNV, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesFeaturesNV( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceComputeShaderDerivativesFeaturesNV( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceComputeShaderDerivativesFeaturesNV & operator=( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV const *>( &rhs );
+ return *this;
+ }
- PhysicalDeviceComputeShaderDerivativesFeaturesNV& operator=( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceComputeShaderDerivativesFeaturesNV & operator=( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceComputeShaderDerivativesFeaturesNV ) );
return *this;
}
@@ -42535,6 +61582,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceComputeShaderDerivativesFeaturesNV*>( this );
@@ -42545,6 +61593,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceComputeShaderDerivativesFeaturesNV const& ) const = default;
+#else
bool operator==( PhysicalDeviceComputeShaderDerivativesFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -42557,38 +61609,53 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads = {};
VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear = {};
+
};
static_assert( sizeof( PhysicalDeviceComputeShaderDerivativesFeaturesNV ) == sizeof( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceComputeShaderDerivativesFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV>
+ {
+ using Type = PhysicalDeviceComputeShaderDerivativesFeaturesNV;
+ };
+
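The wrapper constructors are now guarded by #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ), which reads as an opt-out switch: defining that macro before including the header would compile the constructors out and fall back to member-wise initialization. A hedged sketch of that assumed usage; treating the macro as a user-facing option is an assumption drawn only from the guard itself.

// Assumption: the macro is defined by the user before the include to opt out.
#define VULKAN_HPP_NO_STRUCT_CONSTRUCTORS
#include <vulkan/vulkan.hpp>

void fillFeatures()
{
  // Without the generated constructors the wrapper is still value-initializable
  // through its member initializers and can be filled field by field.
  vk::PhysicalDeviceDepthClipEnableFeaturesEXT features{};
  features.depthClipEnable = VK_TRUE;
}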
struct PhysicalDeviceConditionalRenderingFeaturesEXT
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceConditionalRenderingFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering_ = {} ) VULKAN_HPP_NOEXCEPT
- : conditionalRendering( conditionalRendering_ )
- , inheritedConditionalRendering( inheritedConditionalRendering_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceConditionalRenderingFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering_ = {}, VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering_ = {}) VULKAN_HPP_NOEXCEPT
+ : conditionalRendering( conditionalRendering_ ), inheritedConditionalRendering( inheritedConditionalRendering_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT ) - offsetof( PhysicalDeviceConditionalRenderingFeaturesEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceConditionalRenderingFeaturesEXT( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceConditionalRenderingFeaturesEXT( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceConditionalRenderingFeaturesEXT & operator=( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT const *>( &rhs );
+ return *this;
+ }
- PhysicalDeviceConditionalRenderingFeaturesEXT& operator=( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceConditionalRenderingFeaturesEXT & operator=( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceConditionalRenderingFeaturesEXT ) );
return *this;
}
@@ -42610,6 +61677,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceConditionalRenderingFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceConditionalRenderingFeaturesEXT*>( this );
@@ -42620,6 +61688,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceConditionalRenderingFeaturesEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceConditionalRenderingFeaturesEXT const& ) const = default;
+#else
bool operator==( PhysicalDeviceConditionalRenderingFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -42632,55 +61704,57 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering = {};
VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering = {};
+
};
static_assert( sizeof( PhysicalDeviceConditionalRenderingFeaturesEXT ) == sizeof( VkPhysicalDeviceConditionalRenderingFeaturesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceConditionalRenderingFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT>
+ {
+ using Type = PhysicalDeviceConditionalRenderingFeaturesEXT;
+ };
+
struct PhysicalDeviceConservativeRasterizationPropertiesEXT
{
- PhysicalDeviceConservativeRasterizationPropertiesEXT( float primitiveOverestimationSize_ = {},
- float maxExtraPrimitiveOverestimationSize_ = {},
- float extraPrimitiveOverestimationSizeGranularity_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 primitiveUnderestimation_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 conservativePointAndLineRasterization_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 degenerateTrianglesRasterized_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 degenerateLinesRasterized_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage_ = {} ) VULKAN_HPP_NOEXCEPT
- : primitiveOverestimationSize( primitiveOverestimationSize_ )
- , maxExtraPrimitiveOverestimationSize( maxExtraPrimitiveOverestimationSize_ )
- , extraPrimitiveOverestimationSizeGranularity( extraPrimitiveOverestimationSizeGranularity_ )
- , primitiveUnderestimation( primitiveUnderestimation_ )
- , conservativePointAndLineRasterization( conservativePointAndLineRasterization_ )
- , degenerateTrianglesRasterized( degenerateTrianglesRasterized_ )
- , degenerateLinesRasterized( degenerateLinesRasterized_ )
- , fullyCoveredFragmentShaderInputVariable( fullyCoveredFragmentShaderInputVariable_ )
- , conservativeRasterizationPostDepthCoverage( conservativeRasterizationPostDepthCoverage_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceConservativeRasterizationPropertiesEXT(float primitiveOverestimationSize_ = {}, float maxExtraPrimitiveOverestimationSize_ = {}, float extraPrimitiveOverestimationSizeGranularity_ = {}, VULKAN_HPP_NAMESPACE::Bool32 primitiveUnderestimation_ = {}, VULKAN_HPP_NAMESPACE::Bool32 conservativePointAndLineRasterization_ = {}, VULKAN_HPP_NAMESPACE::Bool32 degenerateTrianglesRasterized_ = {}, VULKAN_HPP_NAMESPACE::Bool32 degenerateLinesRasterized_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage_ = {}) VULKAN_HPP_NOEXCEPT
+ : primitiveOverestimationSize( primitiveOverestimationSize_ ), maxExtraPrimitiveOverestimationSize( maxExtraPrimitiveOverestimationSize_ ), extraPrimitiveOverestimationSizeGranularity( extraPrimitiveOverestimationSizeGranularity_ ), primitiveUnderestimation( primitiveUnderestimation_ ), conservativePointAndLineRasterization( conservativePointAndLineRasterization_ ), degenerateTrianglesRasterized( degenerateTrianglesRasterized_ ), degenerateLinesRasterized( degenerateLinesRasterized_ ), fullyCoveredFragmentShaderInputVariable( fullyCoveredFragmentShaderInputVariable_ ), conservativeRasterizationPostDepthCoverage( conservativeRasterizationPostDepthCoverage_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT ) - offsetof( PhysicalDeviceConservativeRasterizationPropertiesEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceConservativeRasterizationPropertiesEXT( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceConservativeRasterizationPropertiesEXT( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceConservativeRasterizationPropertiesEXT& operator=( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceConservativeRasterizationPropertiesEXT & operator=( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT const *>( &rhs );
return *this;
}
+ PhysicalDeviceConservativeRasterizationPropertiesEXT & operator=( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceConservativeRasterizationPropertiesEXT ) );
+ return *this;
+ }
+
+
operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceConservativeRasterizationPropertiesEXT*>( this );
@@ -42691,6 +61765,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceConservativeRasterizationPropertiesEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceConservativeRasterizationPropertiesEXT const& ) const = default;
+#else
bool operator==( PhysicalDeviceConservativeRasterizationPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -42710,6 +61788,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT;
@@ -42723,32 +61804,44 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Bool32 degenerateLinesRasterized = {};
VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable = {};
VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage = {};
+
};
static_assert( sizeof( PhysicalDeviceConservativeRasterizationPropertiesEXT ) == sizeof( VkPhysicalDeviceConservativeRasterizationPropertiesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceConservativeRasterizationPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT>
+ {
+ using Type = PhysicalDeviceConservativeRasterizationPropertiesEXT;
+ };
+
struct PhysicalDeviceCooperativeMatrixFeaturesNV
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ = {} ) VULKAN_HPP_NOEXCEPT
- : cooperativeMatrix( cooperativeMatrix_ )
- , cooperativeMatrixRobustBufferAccess( cooperativeMatrixRobustBufferAccess_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ = {}, VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ = {}) VULKAN_HPP_NOEXCEPT
+ : cooperativeMatrix( cooperativeMatrix_ ), cooperativeMatrixRobustBufferAccess( cooperativeMatrixRobustBufferAccess_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV ) - offsetof( PhysicalDeviceCooperativeMatrixFeaturesNV, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceCooperativeMatrixFeaturesNV( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceCooperativeMatrixFeaturesNV& operator=( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceCooperativeMatrixFeaturesNV & operator=( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV const *>( &rhs );
+ return *this;
+ }
+
+ PhysicalDeviceCooperativeMatrixFeaturesNV & operator=( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceCooperativeMatrixFeaturesNV ) );
return *this;
}
@@ -42770,6 +61863,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceCooperativeMatrixFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceCooperativeMatrixFeaturesNV*>( this );
@@ -42780,6 +61874,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceCooperativeMatrixFeaturesNV*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceCooperativeMatrixFeaturesNV const& ) const = default;
+#else
bool operator==( PhysicalDeviceCooperativeMatrixFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -42792,39 +61890,57 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix = {};
VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess = {};
+
};
static_assert( sizeof( PhysicalDeviceCooperativeMatrixFeaturesNV ) == sizeof( VkPhysicalDeviceCooperativeMatrixFeaturesNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceCooperativeMatrixFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV>
+ {
+ using Type = PhysicalDeviceCooperativeMatrixFeaturesNV;
+ };
+
struct PhysicalDeviceCooperativeMatrixPropertiesNV
{
- PhysicalDeviceCooperativeMatrixPropertiesNV( VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages_ = {} ) VULKAN_HPP_NOEXCEPT
- : cooperativeMatrixSupportedStages( cooperativeMatrixSupportedStages_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesNV(VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages_ = {}) VULKAN_HPP_NOEXCEPT
+ : cooperativeMatrixSupportedStages( cooperativeMatrixSupportedStages_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV ) - offsetof( PhysicalDeviceCooperativeMatrixPropertiesNV, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesNV( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceCooperativeMatrixPropertiesNV& operator=( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceCooperativeMatrixPropertiesNV & operator=( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV const *>( &rhs );
return *this;
}
+ PhysicalDeviceCooperativeMatrixPropertiesNV & operator=( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceCooperativeMatrixPropertiesNV ) );
+ return *this;
+ }
+
+
operator VkPhysicalDeviceCooperativeMatrixPropertiesNV const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceCooperativeMatrixPropertiesNV*>( this );
@@ -42835,6 +61951,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceCooperativeMatrixPropertiesNV*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceCooperativeMatrixPropertiesNV const& ) const = default;
+#else
bool operator==( PhysicalDeviceCooperativeMatrixPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -42846,35 +61966,52 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV;
void* pNext = {};
VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages = {};
+
};
static_assert( sizeof( PhysicalDeviceCooperativeMatrixPropertiesNV ) == sizeof( VkPhysicalDeviceCooperativeMatrixPropertiesNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceCooperativeMatrixPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV>
+ {
+ using Type = PhysicalDeviceCooperativeMatrixPropertiesNV;
+ };
+
struct PhysicalDeviceCornerSampledImageFeaturesNV
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage_ = {} ) VULKAN_HPP_NOEXCEPT
- : cornerSampledImage( cornerSampledImage_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage_ = {}) VULKAN_HPP_NOEXCEPT
+ : cornerSampledImage( cornerSampledImage_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV ) - offsetof( PhysicalDeviceCornerSampledImageFeaturesNV, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceCornerSampledImageFeaturesNV( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceCornerSampledImageFeaturesNV& operator=( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceCornerSampledImageFeaturesNV & operator=( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV const *>( &rhs );
+ return *this;
+ }
+
+ PhysicalDeviceCornerSampledImageFeaturesNV & operator=( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceCornerSampledImageFeaturesNV ) );
return *this;
}
@@ -42890,6 +62027,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceCornerSampledImageFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceCornerSampledImageFeaturesNV*>( this );
@@ -42900,6 +62038,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceCornerSampledImageFeaturesNV*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceCornerSampledImageFeaturesNV const& ) const = default;
+#else
bool operator==( PhysicalDeviceCornerSampledImageFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -42911,35 +62053,52 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage = {};
+
};
static_assert( sizeof( PhysicalDeviceCornerSampledImageFeaturesNV ) == sizeof( VkPhysicalDeviceCornerSampledImageFeaturesNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceCornerSampledImageFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV>
+ {
+ using Type = PhysicalDeviceCornerSampledImageFeaturesNV;
+ };
+
struct PhysicalDeviceCoverageReductionModeFeaturesNV
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceCoverageReductionModeFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode_ = {} ) VULKAN_HPP_NOEXCEPT
- : coverageReductionMode( coverageReductionMode_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceCoverageReductionModeFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode_ = {}) VULKAN_HPP_NOEXCEPT
+ : coverageReductionMode( coverageReductionMode_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV ) - offsetof( PhysicalDeviceCoverageReductionModeFeaturesNV, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceCoverageReductionModeFeaturesNV( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceCoverageReductionModeFeaturesNV( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceCoverageReductionModeFeaturesNV & operator=( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV const *>( &rhs );
+ return *this;
+ }
- PhysicalDeviceCoverageReductionModeFeaturesNV& operator=( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceCoverageReductionModeFeaturesNV & operator=( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceCoverageReductionModeFeaturesNV ) );
return *this;
}
@@ -42955,6 +62114,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceCoverageReductionModeFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceCoverageReductionModeFeaturesNV*>( this );
@@ -42965,6 +62125,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceCoverageReductionModeFeaturesNV*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceCoverageReductionModeFeaturesNV const& ) const = default;
+#else
bool operator==( PhysicalDeviceCoverageReductionModeFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -42976,35 +62140,222 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode = {};
+
};
static_assert( sizeof( PhysicalDeviceCoverageReductionModeFeaturesNV ) == sizeof( VkPhysicalDeviceCoverageReductionModeFeaturesNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceCoverageReductionModeFeaturesNV>::value, "struct wrapper is not a standard layout!" );
- struct PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV>
+ {
+ using Type = PhysicalDeviceCoverageReductionModeFeaturesNV;
+ };
+
+ struct PhysicalDeviceCustomBorderColorFeaturesEXT
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing_ = {} ) VULKAN_HPP_NOEXCEPT
- : dedicatedAllocationImageAliasing( dedicatedAllocationImageAliasing_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 customBorderColors_ = {}, VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat_ = {}) VULKAN_HPP_NOEXCEPT
+ : customBorderColors( customBorderColors_ ), customBorderColorWithoutFormat( customBorderColorWithoutFormat_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorFeaturesEXT( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceCustomBorderColorFeaturesEXT( VkPhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ) - offsetof( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV, pNext ) );
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceCustomBorderColorFeaturesEXT & operator=( VkPhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorFeaturesEXT const *>( &rhs );
+ return *this;
+ }
+
+ PhysicalDeviceCustomBorderColorFeaturesEXT & operator=( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceCustomBorderColorFeaturesEXT ) );
+ return *this;
+ }
+
+ PhysicalDeviceCustomBorderColorFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PhysicalDeviceCustomBorderColorFeaturesEXT & setCustomBorderColors( VULKAN_HPP_NAMESPACE::Bool32 customBorderColors_ ) VULKAN_HPP_NOEXCEPT
+ {
+ customBorderColors = customBorderColors_;
return *this;
}
+ PhysicalDeviceCustomBorderColorFeaturesEXT & setCustomBorderColorWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat_ ) VULKAN_HPP_NOEXCEPT
+ {
+ customBorderColorWithoutFormat = customBorderColorWithoutFormat_;
+ return *this;
+ }
+
+
+ operator VkPhysicalDeviceCustomBorderColorFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceCustomBorderColorFeaturesEXT*>( this );
+ }
+
+ operator VkPhysicalDeviceCustomBorderColorFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceCustomBorderColorFeaturesEXT*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceCustomBorderColorFeaturesEXT const& ) const = default;
+#else
+ bool operator==( PhysicalDeviceCustomBorderColorFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( customBorderColors == rhs.customBorderColors )
+ && ( customBorderColorWithoutFormat == rhs.customBorderColorWithoutFormat );
+ }
+
+ bool operator!=( PhysicalDeviceCustomBorderColorFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 customBorderColors = {};
+ VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat = {};
+
+ };
+ static_assert( sizeof( PhysicalDeviceCustomBorderColorFeaturesEXT ) == sizeof( VkPhysicalDeviceCustomBorderColorFeaturesEXT ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceCustomBorderColorFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT>
+ {
+ using Type = PhysicalDeviceCustomBorderColorFeaturesEXT;
+ };
+
+ struct PhysicalDeviceCustomBorderColorPropertiesEXT
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorPropertiesEXT(uint32_t maxCustomBorderColorSamplers_ = {}) VULKAN_HPP_NOEXCEPT
+ : maxCustomBorderColorSamplers( maxCustomBorderColorSamplers_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorPropertiesEXT( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceCustomBorderColorPropertiesEXT( VkPhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceCustomBorderColorPropertiesEXT & operator=( VkPhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorPropertiesEXT const *>( &rhs );
+ return *this;
+ }
+
+ PhysicalDeviceCustomBorderColorPropertiesEXT & operator=( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceCustomBorderColorPropertiesEXT ) );
+ return *this;
+ }
+
+
+ operator VkPhysicalDeviceCustomBorderColorPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceCustomBorderColorPropertiesEXT*>( this );
+ }
+
+ operator VkPhysicalDeviceCustomBorderColorPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceCustomBorderColorPropertiesEXT*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceCustomBorderColorPropertiesEXT const& ) const = default;
+#else
+ bool operator==( PhysicalDeviceCustomBorderColorPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( maxCustomBorderColorSamplers == rhs.maxCustomBorderColorSamplers );
+ }
+
+ bool operator!=( PhysicalDeviceCustomBorderColorPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT;
+ void* pNext = {};
+ uint32_t maxCustomBorderColorSamplers = {};
+
+ };
+ static_assert( sizeof( PhysicalDeviceCustomBorderColorPropertiesEXT ) == sizeof( VkPhysicalDeviceCustomBorderColorPropertiesEXT ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceCustomBorderColorPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT>
+ {
+ using Type = PhysicalDeviceCustomBorderColorPropertiesEXT;
+ };
+
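PhysicalDeviceCustomBorderColorFeaturesEXT and PhysicalDeviceCustomBorderColorPropertiesEXT are new wrappers in this update (VK_EXT_custom_border_color). A short usage sketch of how such feature/property structs are queried through a pNext chain, written against the plain C API for brevity; physicalDevice is assumed to be a valid handle obtained elsewhere.

#include <vulkan/vulkan.h>

void queryCustomBorderColor( VkPhysicalDevice physicalDevice )
{
  VkPhysicalDeviceCustomBorderColorFeaturesEXT borderColorFeatures = {};
  borderColorFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT;

  VkPhysicalDeviceFeatures2 features2 = {};
  features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
  features2.pNext = &borderColorFeatures;
  vkGetPhysicalDeviceFeatures2( physicalDevice, &features2 );
  // borderColorFeatures.customBorderColors / customBorderColorWithoutFormat now hold the support bits.

  VkPhysicalDeviceCustomBorderColorPropertiesEXT borderColorProperties = {};
  borderColorProperties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT;

  VkPhysicalDeviceProperties2 properties2 = {};
  properties2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
  properties2.pNext = &borderColorProperties;
  vkGetPhysicalDeviceProperties2( physicalDevice, &properties2 );
  // borderColorProperties.maxCustomBorderColorSamplers is the implementation limit.
}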
+ struct PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing_ = {}) VULKAN_HPP_NOEXCEPT
+ : dedicatedAllocationImageAliasing( dedicatedAllocationImageAliasing_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & operator=( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const *>( &rhs );
+ return *this;
+ }
- PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV& operator=( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & operator=( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ) );
return *this;
}
@@ -43020,6 +62371,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV*>( this );
@@ -43030,6 +62382,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const& ) const = default;
+#else
bool operator==( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -43041,35 +62397,52 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing = {};
+
};
static_assert( sizeof( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ) == sizeof( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>
+ {
+ using Type = PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;
+ };
+
struct PhysicalDeviceDepthClipEnableFeaturesEXT
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipEnableFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {} ) VULKAN_HPP_NOEXCEPT
- : depthClipEnable( depthClipEnable_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipEnableFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {}) VULKAN_HPP_NOEXCEPT
+ : depthClipEnable( depthClipEnable_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT ) - offsetof( PhysicalDeviceDepthClipEnableFeaturesEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipEnableFeaturesEXT( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDepthClipEnableFeaturesEXT( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceDepthClipEnableFeaturesEXT & operator=( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT const *>( &rhs );
+ return *this;
+ }
- PhysicalDeviceDepthClipEnableFeaturesEXT& operator=( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDepthClipEnableFeaturesEXT & operator=( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceDepthClipEnableFeaturesEXT ) );
return *this;
}
@@ -43085,6 +62458,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceDepthClipEnableFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDepthClipEnableFeaturesEXT*>( this );
@@ -43095,6 +62469,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceDepthClipEnableFeaturesEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceDepthClipEnableFeaturesEXT const& ) const = default;
+#else
bool operator==( PhysicalDeviceDepthClipEnableFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -43106,44 +62484,56 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable = {};
+
};
static_assert( sizeof( PhysicalDeviceDepthClipEnableFeaturesEXT ) == sizeof( VkPhysicalDeviceDepthClipEnableFeaturesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceDepthClipEnableFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT>
+ {
+ using Type = PhysicalDeviceDepthClipEnableFeaturesEXT;
+ };
+
struct PhysicalDeviceDepthStencilResolveProperties
{
- PhysicalDeviceDepthStencilResolveProperties( VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {},
- VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {} ) VULKAN_HPP_NOEXCEPT
- : supportedDepthResolveModes( supportedDepthResolveModes_ )
- , supportedStencilResolveModes( supportedStencilResolveModes_ )
- , independentResolveNone( independentResolveNone_ )
- , independentResolve( independentResolve_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthStencilResolveProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthStencilResolveProperties(VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {}, VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {}, VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {}) VULKAN_HPP_NOEXCEPT
+ : supportedDepthResolveModes( supportedDepthResolveModes_ ), supportedStencilResolveModes( supportedStencilResolveModes_ ), independentResolveNone( independentResolveNone_ ), independentResolve( independentResolve_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties ) - offsetof( PhysicalDeviceDepthStencilResolveProperties, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthStencilResolveProperties( PhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDepthStencilResolveProperties( VkPhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceDepthStencilResolveProperties & operator=( VkPhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties const *>( &rhs );
+ return *this;
+ }
- PhysicalDeviceDepthStencilResolveProperties& operator=( VkPhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDepthStencilResolveProperties & operator=( PhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceDepthStencilResolveProperties ) );
return *this;
}
+
operator VkPhysicalDeviceDepthStencilResolveProperties const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDepthStencilResolveProperties*>( this );
@@ -43154,6 +62544,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceDepthStencilResolveProperties*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceDepthStencilResolveProperties const& ) const = default;
+#else
bool operator==( PhysicalDeviceDepthStencilResolveProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -43168,6 +62562,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthStencilResolveProperties;
@@ -43176,68 +62573,45 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes = {};
VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone = {};
VULKAN_HPP_NAMESPACE::Bool32 independentResolve = {};
+
};
static_assert( sizeof( PhysicalDeviceDepthStencilResolveProperties ) == sizeof( VkPhysicalDeviceDepthStencilResolveProperties ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceDepthStencilResolveProperties>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceDepthStencilResolveProperties>
+ {
+ using Type = PhysicalDeviceDepthStencilResolveProperties;
+ };
+ using PhysicalDeviceDepthStencilResolvePropertiesKHR = PhysicalDeviceDepthStencilResolveProperties;
+
struct PhysicalDeviceDescriptorIndexingFeatures
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = {} ) VULKAN_HPP_NOEXCEPT
- : shaderInputAttachmentArrayDynamicIndexing( shaderInputAttachmentArrayDynamicIndexing_ )
- , shaderUniformTexelBufferArrayDynamicIndexing( shaderUniformTexelBufferArrayDynamicIndexing_ )
- , shaderStorageTexelBufferArrayDynamicIndexing( shaderStorageTexelBufferArrayDynamicIndexing_ )
- , shaderUniformBufferArrayNonUniformIndexing( shaderUniformBufferArrayNonUniformIndexing_ )
- , shaderSampledImageArrayNonUniformIndexing( shaderSampledImageArrayNonUniformIndexing_ )
- , shaderStorageBufferArrayNonUniformIndexing( shaderStorageBufferArrayNonUniformIndexing_ )
- , shaderStorageImageArrayNonUniformIndexing( shaderStorageImageArrayNonUniformIndexing_ )
- , shaderInputAttachmentArrayNonUniformIndexing( shaderInputAttachmentArrayNonUniformIndexing_ )
- , shaderUniformTexelBufferArrayNonUniformIndexing( shaderUniformTexelBufferArrayNonUniformIndexing_ )
- , shaderStorageTexelBufferArrayNonUniformIndexing( shaderStorageTexelBufferArrayNonUniformIndexing_ )
- , descriptorBindingUniformBufferUpdateAfterBind( descriptorBindingUniformBufferUpdateAfterBind_ )
- , descriptorBindingSampledImageUpdateAfterBind( descriptorBindingSampledImageUpdateAfterBind_ )
- , descriptorBindingStorageImageUpdateAfterBind( descriptorBindingStorageImageUpdateAfterBind_ )
- , descriptorBindingStorageBufferUpdateAfterBind( descriptorBindingStorageBufferUpdateAfterBind_ )
- , descriptorBindingUniformTexelBufferUpdateAfterBind( descriptorBindingUniformTexelBufferUpdateAfterBind_ )
- , descriptorBindingStorageTexelBufferUpdateAfterBind( descriptorBindingStorageTexelBufferUpdateAfterBind_ )
- , descriptorBindingUpdateUnusedWhilePending( descriptorBindingUpdateUnusedWhilePending_ )
- , descriptorBindingPartiallyBound( descriptorBindingPartiallyBound_ )
- , descriptorBindingVariableDescriptorCount( descriptorBindingVariableDescriptorCount_ )
- , runtimeDescriptorArray( runtimeDescriptorArray_ )
- {}
-
- VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures ) - offsetof( PhysicalDeviceDescriptorIndexingFeatures, pNext ) );
- return *this;
- }
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorIndexingFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeatures(VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = {}, VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = {}) VULKAN_HPP_NOEXCEPT
+ : shaderInputAttachmentArrayDynamicIndexing( shaderInputAttachmentArrayDynamicIndexing_ ), shaderUniformTexelBufferArrayDynamicIndexing( shaderUniformTexelBufferArrayDynamicIndexing_ ), shaderStorageTexelBufferArrayDynamicIndexing( shaderStorageTexelBufferArrayDynamicIndexing_ ), shaderUniformBufferArrayNonUniformIndexing( shaderUniformBufferArrayNonUniformIndexing_ ), shaderSampledImageArrayNonUniformIndexing( shaderSampledImageArrayNonUniformIndexing_ ), shaderStorageBufferArrayNonUniformIndexing( shaderStorageBufferArrayNonUniformIndexing_ ), shaderStorageImageArrayNonUniformIndexing( shaderStorageImageArrayNonUniformIndexing_ ), shaderInputAttachmentArrayNonUniformIndexing( shaderInputAttachmentArrayNonUniformIndexing_ ), shaderUniformTexelBufferArrayNonUniformIndexing( shaderUniformTexelBufferArrayNonUniformIndexing_ ), shaderStorageTexelBufferArrayNonUniformIndexing( shaderStorageTexelBufferArrayNonUniformIndexing_ ), descriptorBindingUniformBufferUpdateAfterBind( descriptorBindingUniformBufferUpdateAfterBind_ ), descriptorBindingSampledImageUpdateAfterBind( descriptorBindingSampledImageUpdateAfterBind_ ), descriptorBindingStorageImageUpdateAfterBind( descriptorBindingStorageImageUpdateAfterBind_ ), descriptorBindingStorageBufferUpdateAfterBind( descriptorBindingStorageBufferUpdateAfterBind_ ), descriptorBindingUniformTexelBufferUpdateAfterBind( descriptorBindingUniformTexelBufferUpdateAfterBind_ ), descriptorBindingStorageTexelBufferUpdateAfterBind( descriptorBindingStorageTexelBufferUpdateAfterBind_ ), descriptorBindingUpdateUnusedWhilePending( descriptorBindingUpdateUnusedWhilePending_ ), descriptorBindingPartiallyBound( descriptorBindingPartiallyBound_ ), descriptorBindingVariableDescriptorCount( descriptorBindingVariableDescriptorCount_ ), runtimeDescriptorArray( runtimeDescriptorArray_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeatures( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDescriptorIndexingFeatures( VkPhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceDescriptorIndexingFeatures & operator=( VkPhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures const *>( &rhs );
+ return *this;
+ }
- PhysicalDeviceDescriptorIndexingFeatures& operator=( VkPhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDescriptorIndexingFeatures & operator=( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceDescriptorIndexingFeatures ) );
return *this;
}
@@ -43367,6 +62741,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceDescriptorIndexingFeatures const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDescriptorIndexingFeatures*>( this );
@@ -43377,6 +62752,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceDescriptorIndexingFeatures*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceDescriptorIndexingFeatures const& ) const = default;
+#else
bool operator==( PhysicalDeviceDescriptorIndexingFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -43407,6 +62786,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingFeatures;
@@ -43431,77 +62813,49 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound = {};
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount = {};
VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray = {};
+
};
static_assert( sizeof( PhysicalDeviceDescriptorIndexingFeatures ) == sizeof( VkPhysicalDeviceDescriptorIndexingFeatures ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceDescriptorIndexingFeatures>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceDescriptorIndexingFeatures>
+ {
+ using Type = PhysicalDeviceDescriptorIndexingFeatures;
+ };
+ using PhysicalDeviceDescriptorIndexingFeaturesEXT = PhysicalDeviceDescriptorIndexingFeatures;
+
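Editor's note: the hunk above promotes the EXT descriptor-indexing feature struct to the core Vulkan 1.2 wrapper (keeping the EXT name as an alias), adds the CppType mapping, and guards the constructors behind VULKAN_HPP_NO_STRUCT_CONSTRUCTORS. A minimal usage sketch, not part of the diff, assuming a "vk" alias for VULKAN_HPP_NAMESPACE and an existing vk::PhysicalDevice named physicalDevice:

    // Query which descriptor-indexing features the implementation supports by
    // chaining the struct into a PhysicalDeviceFeatures2 query. Members are
    // public, so direct assignment works alongside the generated set*() methods.
    vk::PhysicalDeviceDescriptorIndexingFeatures indexingFeatures;
    vk::PhysicalDeviceFeatures2 features2;
    features2.pNext = &indexingFeatures;            // pNext chain for the query
    physicalDevice.getFeatures2( &features2 );      // driver fills in both structs
    bool canUseRuntimeArrays = ( indexingFeatures.runtimeDescriptorArray == VK_TRUE );
    // To enable the supported features, the same chain can be attached to
    // vk::DeviceCreateInfo::pNext when the logical device is created.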
struct PhysicalDeviceDescriptorIndexingProperties
{
- PhysicalDeviceDescriptorIndexingProperties( uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {},
- uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {},
- uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {},
- uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {},
- uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {},
- uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {},
- uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {},
- uint32_t maxPerStageUpdateAfterBindResources_ = {},
- uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {},
- uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {},
- uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {},
- uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {},
- uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {},
- uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {},
- uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {},
- uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {} ) VULKAN_HPP_NOEXCEPT
- : maxUpdateAfterBindDescriptorsInAllPools( maxUpdateAfterBindDescriptorsInAllPools_ )
- , shaderUniformBufferArrayNonUniformIndexingNative( shaderUniformBufferArrayNonUniformIndexingNative_ )
- , shaderSampledImageArrayNonUniformIndexingNative( shaderSampledImageArrayNonUniformIndexingNative_ )
- , shaderStorageBufferArrayNonUniformIndexingNative( shaderStorageBufferArrayNonUniformIndexingNative_ )
- , shaderStorageImageArrayNonUniformIndexingNative( shaderStorageImageArrayNonUniformIndexingNative_ )
- , shaderInputAttachmentArrayNonUniformIndexingNative( shaderInputAttachmentArrayNonUniformIndexingNative_ )
- , robustBufferAccessUpdateAfterBind( robustBufferAccessUpdateAfterBind_ )
- , quadDivergentImplicitLod( quadDivergentImplicitLod_ )
- , maxPerStageDescriptorUpdateAfterBindSamplers( maxPerStageDescriptorUpdateAfterBindSamplers_ )
- , maxPerStageDescriptorUpdateAfterBindUniformBuffers( maxPerStageDescriptorUpdateAfterBindUniformBuffers_ )
- , maxPerStageDescriptorUpdateAfterBindStorageBuffers( maxPerStageDescriptorUpdateAfterBindStorageBuffers_ )
- , maxPerStageDescriptorUpdateAfterBindSampledImages( maxPerStageDescriptorUpdateAfterBindSampledImages_ )
- , maxPerStageDescriptorUpdateAfterBindStorageImages( maxPerStageDescriptorUpdateAfterBindStorageImages_ )
- , maxPerStageDescriptorUpdateAfterBindInputAttachments( maxPerStageDescriptorUpdateAfterBindInputAttachments_ )
- , maxPerStageUpdateAfterBindResources( maxPerStageUpdateAfterBindResources_ )
- , maxDescriptorSetUpdateAfterBindSamplers( maxDescriptorSetUpdateAfterBindSamplers_ )
- , maxDescriptorSetUpdateAfterBindUniformBuffers( maxDescriptorSetUpdateAfterBindUniformBuffers_ )
- , maxDescriptorSetUpdateAfterBindUniformBuffersDynamic( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ )
- , maxDescriptorSetUpdateAfterBindStorageBuffers( maxDescriptorSetUpdateAfterBindStorageBuffers_ )
- , maxDescriptorSetUpdateAfterBindStorageBuffersDynamic( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ )
- , maxDescriptorSetUpdateAfterBindSampledImages( maxDescriptorSetUpdateAfterBindSampledImages_ )
- , maxDescriptorSetUpdateAfterBindStorageImages( maxDescriptorSetUpdateAfterBindStorageImages_ )
- , maxDescriptorSetUpdateAfterBindInputAttachments( maxDescriptorSetUpdateAfterBindInputAttachments_ )
- {}
-
- VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties ) - offsetof( PhysicalDeviceDescriptorIndexingProperties, pNext ) );
- return *this;
- }
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorIndexingProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingProperties(uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {}, uint32_t maxPerStageUpdateAfterBindResources_ = {}, uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {}, uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {}, uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {}, uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {}, uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {}) VULKAN_HPP_NOEXCEPT
+ : maxUpdateAfterBindDescriptorsInAllPools( maxUpdateAfterBindDescriptorsInAllPools_ ), shaderUniformBufferArrayNonUniformIndexingNative( shaderUniformBufferArrayNonUniformIndexingNative_ ), shaderSampledImageArrayNonUniformIndexingNative( shaderSampledImageArrayNonUniformIndexingNative_ ), shaderStorageBufferArrayNonUniformIndexingNative( shaderStorageBufferArrayNonUniformIndexingNative_ ), shaderStorageImageArrayNonUniformIndexingNative( shaderStorageImageArrayNonUniformIndexingNative_ ), shaderInputAttachmentArrayNonUniformIndexingNative( shaderInputAttachmentArrayNonUniformIndexingNative_ ), robustBufferAccessUpdateAfterBind( robustBufferAccessUpdateAfterBind_ ), quadDivergentImplicitLod( quadDivergentImplicitLod_ ), maxPerStageDescriptorUpdateAfterBindSamplers( maxPerStageDescriptorUpdateAfterBindSamplers_ ), maxPerStageDescriptorUpdateAfterBindUniformBuffers( maxPerStageDescriptorUpdateAfterBindUniformBuffers_ ), maxPerStageDescriptorUpdateAfterBindStorageBuffers( maxPerStageDescriptorUpdateAfterBindStorageBuffers_ ), maxPerStageDescriptorUpdateAfterBindSampledImages( maxPerStageDescriptorUpdateAfterBindSampledImages_ ), maxPerStageDescriptorUpdateAfterBindStorageImages( maxPerStageDescriptorUpdateAfterBindStorageImages_ ), maxPerStageDescriptorUpdateAfterBindInputAttachments( maxPerStageDescriptorUpdateAfterBindInputAttachments_ ), maxPerStageUpdateAfterBindResources( maxPerStageUpdateAfterBindResources_ ), maxDescriptorSetUpdateAfterBindSamplers( maxDescriptorSetUpdateAfterBindSamplers_ ), maxDescriptorSetUpdateAfterBindUniformBuffers( maxDescriptorSetUpdateAfterBindUniformBuffers_ ), maxDescriptorSetUpdateAfterBindUniformBuffersDynamic( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ ), maxDescriptorSetUpdateAfterBindStorageBuffers( maxDescriptorSetUpdateAfterBindStorageBuffers_ ), maxDescriptorSetUpdateAfterBindStorageBuffersDynamic( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ ), maxDescriptorSetUpdateAfterBindSampledImages( maxDescriptorSetUpdateAfterBindSampledImages_ ), maxDescriptorSetUpdateAfterBindStorageImages( maxDescriptorSetUpdateAfterBindStorageImages_ ), maxDescriptorSetUpdateAfterBindInputAttachments( maxDescriptorSetUpdateAfterBindInputAttachments_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingProperties( PhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDescriptorIndexingProperties( VkPhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceDescriptorIndexingProperties & operator=( VkPhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties const *>( &rhs );
+ return *this;
+ }
- PhysicalDeviceDescriptorIndexingProperties& operator=( VkPhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDescriptorIndexingProperties & operator=( PhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceDescriptorIndexingProperties ) );
return *this;
}
+
operator VkPhysicalDeviceDescriptorIndexingProperties const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDescriptorIndexingProperties*>( this );
@@ -43512,6 +62866,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceDescriptorIndexingProperties*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceDescriptorIndexingProperties const& ) const = default;
+#else
bool operator==( PhysicalDeviceDescriptorIndexingProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -43545,6 +62903,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingProperties;
@@ -43572,649 +62933,808 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t maxDescriptorSetUpdateAfterBindSampledImages = {};
uint32_t maxDescriptorSetUpdateAfterBindStorageImages = {};
uint32_t maxDescriptorSetUpdateAfterBindInputAttachments = {};
+
};
static_assert( sizeof( PhysicalDeviceDescriptorIndexingProperties ) == sizeof( VkPhysicalDeviceDescriptorIndexingProperties ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceDescriptorIndexingProperties>::value, "struct wrapper is not a standard layout!" );
- struct PhysicalDeviceDiscardRectanglePropertiesEXT
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceDescriptorIndexingProperties>
{
- PhysicalDeviceDiscardRectanglePropertiesEXT( uint32_t maxDiscardRectangles_ = {} ) VULKAN_HPP_NOEXCEPT
- : maxDiscardRectangles( maxDiscardRectangles_ )
+ using Type = PhysicalDeviceDescriptorIndexingProperties;
+ };
+ using PhysicalDeviceDescriptorIndexingPropertiesEXT = PhysicalDeviceDescriptorIndexingProperties;
+
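Editor's note: the matching properties struct gets the same treatment, including a CppType specialization that maps its StructureType to the C++ wrapper type. A hedged sketch of reading the new limits through the enhanced-mode StructureChain overload of getProperties2 (physicalDevice is an assumed handle, not part of the diff):

    // Retrieve the descriptor-indexing limits together with the base properties.
    auto chain = physicalDevice.getProperties2< vk::PhysicalDeviceProperties2,
                                                vk::PhysicalDeviceDescriptorIndexingProperties >();
    const auto & indexingProps = chain.get< vk::PhysicalDeviceDescriptorIndexingProperties >();
    uint32_t maxUpdateAfterBind = indexingProps.maxUpdateAfterBindDescriptorsInAllPools;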
+ struct PhysicalDeviceDeviceGeneratedCommandsFeaturesNV
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ = {}) VULKAN_HPP_NOEXCEPT
+ : deviceGeneratedCommands( deviceGeneratedCommands_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT ) - offsetof( PhysicalDeviceDiscardRectanglePropertiesEXT, pNext ) );
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & operator=( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const *>( &rhs );
return *this;
}
- PhysicalDeviceDiscardRectanglePropertiesEXT( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & operator=( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV ) );
+ return *this;
}
- PhysicalDeviceDiscardRectanglePropertiesEXT& operator=( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT const *>(&rhs);
+ pNext = pNext_;
return *this;
}
- operator VkPhysicalDeviceDiscardRectanglePropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & setDeviceGeneratedCommands( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPhysicalDeviceDiscardRectanglePropertiesEXT*>( this );
+ deviceGeneratedCommands = deviceGeneratedCommands_;
+ return *this;
}
- operator VkPhysicalDeviceDiscardRectanglePropertiesEXT &() VULKAN_HPP_NOEXCEPT
+
+ operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPhysicalDeviceDiscardRectanglePropertiesEXT*>( this );
+ return *reinterpret_cast<const VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV*>( this );
}
- bool operator==( PhysicalDeviceDiscardRectanglePropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const& ) const = default;
+#else
+ bool operator==( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( maxDiscardRectangles == rhs.maxDiscardRectangles );
+ && ( deviceGeneratedCommands == rhs.deviceGeneratedCommands );
}
- bool operator!=( PhysicalDeviceDiscardRectanglePropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV;
void* pNext = {};
- uint32_t maxDiscardRectangles = {};
+ VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands = {};
+
};
- static_assert( sizeof( PhysicalDeviceDiscardRectanglePropertiesEXT ) == sizeof( VkPhysicalDeviceDiscardRectanglePropertiesEXT ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PhysicalDeviceDiscardRectanglePropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV ) == sizeof( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceDeviceGeneratedCommandsFeaturesNV>::value, "struct wrapper is not a standard layout!" );
- struct PhysicalDeviceDriverProperties
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV>
{
- PhysicalDeviceDriverProperties( VULKAN_HPP_NAMESPACE::DriverId driverID_ = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary,
- std::array<char,VK_MAX_DRIVER_NAME_SIZE> const& driverName_ = {},
- std::array<char,VK_MAX_DRIVER_INFO_SIZE> const& driverInfo_ = {},
- VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {} ) VULKAN_HPP_NOEXCEPT
- : driverID( driverID_ )
- , driverName{}
- , driverInfo{}
- , conformanceVersion( conformanceVersion_ )
- {
- VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_DRIVER_NAME_SIZE,VK_MAX_DRIVER_NAME_SIZE>::copy( driverName, driverName_ );
- VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_DRIVER_INFO_SIZE,VK_MAX_DRIVER_INFO_SIZE>::copy( driverInfo, driverInfo_ );
- }
+ using Type = PhysicalDeviceDeviceGeneratedCommandsFeaturesNV;
+ };
- VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ struct PhysicalDeviceDeviceGeneratedCommandsPropertiesNV
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsPropertiesNV(uint32_t maxGraphicsShaderGroupCount_ = {}, uint32_t maxIndirectSequenceCount_ = {}, uint32_t maxIndirectCommandsTokenCount_ = {}, uint32_t maxIndirectCommandsStreamCount_ = {}, uint32_t maxIndirectCommandsTokenOffset_ = {}, uint32_t maxIndirectCommandsStreamStride_ = {}, uint32_t minSequencesCountBufferOffsetAlignment_ = {}, uint32_t minSequencesIndexBufferOffsetAlignment_ = {}, uint32_t minIndirectCommandsBufferOffsetAlignment_ = {}) VULKAN_HPP_NOEXCEPT
+ : maxGraphicsShaderGroupCount( maxGraphicsShaderGroupCount_ ), maxIndirectSequenceCount( maxIndirectSequenceCount_ ), maxIndirectCommandsTokenCount( maxIndirectCommandsTokenCount_ ), maxIndirectCommandsStreamCount( maxIndirectCommandsStreamCount_ ), maxIndirectCommandsTokenOffset( maxIndirectCommandsTokenOffset_ ), maxIndirectCommandsStreamStride( maxIndirectCommandsStreamStride_ ), minSequencesCountBufferOffsetAlignment( minSequencesCountBufferOffsetAlignment_ ), minSequencesIndexBufferOffsetAlignment( minSequencesIndexBufferOffsetAlignment_ ), minIndirectCommandsBufferOffsetAlignment( minIndirectCommandsBufferOffsetAlignment_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties ) - offsetof( PhysicalDeviceDriverProperties, pNext ) );
- return *this;
+ *this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceDriverProperties( VkPhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDeviceGeneratedCommandsPropertiesNV & operator=( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const *>( &rhs );
+ return *this;
}
- PhysicalDeviceDriverProperties& operator=( VkPhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDeviceGeneratedCommandsPropertiesNV & operator=( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV ) );
return *this;
}
- operator VkPhysicalDeviceDriverProperties const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPhysicalDeviceDriverProperties*>( this );
+ return *reinterpret_cast<const VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV*>( this );
}
- operator VkPhysicalDeviceDriverProperties &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPhysicalDeviceDriverProperties*>( this );
+ return *reinterpret_cast<VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV*>( this );
}
- bool operator==( PhysicalDeviceDriverProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const& ) const = default;
+#else
+ bool operator==( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( driverID == rhs.driverID )
- && ( memcmp( driverName, rhs.driverName, VK_MAX_DRIVER_NAME_SIZE * sizeof( char ) ) == 0 )
- && ( memcmp( driverInfo, rhs.driverInfo, VK_MAX_DRIVER_INFO_SIZE * sizeof( char ) ) == 0 )
- && ( conformanceVersion == rhs.conformanceVersion );
+ && ( maxGraphicsShaderGroupCount == rhs.maxGraphicsShaderGroupCount )
+ && ( maxIndirectSequenceCount == rhs.maxIndirectSequenceCount )
+ && ( maxIndirectCommandsTokenCount == rhs.maxIndirectCommandsTokenCount )
+ && ( maxIndirectCommandsStreamCount == rhs.maxIndirectCommandsStreamCount )
+ && ( maxIndirectCommandsTokenOffset == rhs.maxIndirectCommandsTokenOffset )
+ && ( maxIndirectCommandsStreamStride == rhs.maxIndirectCommandsStreamStride )
+ && ( minSequencesCountBufferOffsetAlignment == rhs.minSequencesCountBufferOffsetAlignment )
+ && ( minSequencesIndexBufferOffsetAlignment == rhs.minSequencesIndexBufferOffsetAlignment )
+ && ( minIndirectCommandsBufferOffsetAlignment == rhs.minIndirectCommandsBufferOffsetAlignment );
}
- bool operator!=( PhysicalDeviceDriverProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDriverProperties;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV;
void* pNext = {};
- VULKAN_HPP_NAMESPACE::DriverId driverID = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary;
- char driverName[VK_MAX_DRIVER_NAME_SIZE] = {};
- char driverInfo[VK_MAX_DRIVER_INFO_SIZE] = {};
- VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion = {};
+ uint32_t maxGraphicsShaderGroupCount = {};
+ uint32_t maxIndirectSequenceCount = {};
+ uint32_t maxIndirectCommandsTokenCount = {};
+ uint32_t maxIndirectCommandsStreamCount = {};
+ uint32_t maxIndirectCommandsTokenOffset = {};
+ uint32_t maxIndirectCommandsStreamStride = {};
+ uint32_t minSequencesCountBufferOffsetAlignment = {};
+ uint32_t minSequencesIndexBufferOffsetAlignment = {};
+ uint32_t minIndirectCommandsBufferOffsetAlignment = {};
+
};
- static_assert( sizeof( PhysicalDeviceDriverProperties ) == sizeof( VkPhysicalDeviceDriverProperties ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PhysicalDeviceDriverProperties>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV ) == sizeof( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceDeviceGeneratedCommandsPropertiesNV>::value, "struct wrapper is not a standard layout!" );
- struct PhysicalDeviceExclusiveScissorFeaturesNV
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV>
+ {
+ using Type = PhysicalDeviceDeviceGeneratedCommandsPropertiesNV;
+ };
+
+ struct PhysicalDeviceDiagnosticsConfigFeaturesNV
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ = {} ) VULKAN_HPP_NOEXCEPT
- : exclusiveScissor( exclusiveScissor_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceDiagnosticsConfigFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig_ = {}) VULKAN_HPP_NOEXCEPT
+ : diagnosticsConfig( diagnosticsConfig_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceDiagnosticsConfigFeaturesNV( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceDiagnosticsConfigFeaturesNV( VkPhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV ) - offsetof( PhysicalDeviceExclusiveScissorFeaturesNV, pNext ) );
- return *this;
+ *this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceExclusiveScissorFeaturesNV( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDiagnosticsConfigFeaturesNV & operator=( VkPhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiagnosticsConfigFeaturesNV const *>( &rhs );
+ return *this;
}
- PhysicalDeviceExclusiveScissorFeaturesNV& operator=( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDiagnosticsConfigFeaturesNV & operator=( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceDiagnosticsConfigFeaturesNV ) );
return *this;
}
- PhysicalDeviceExclusiveScissorFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDiagnosticsConfigFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- PhysicalDeviceExclusiveScissorFeaturesNV & setExclusiveScissor( VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDiagnosticsConfigFeaturesNV & setDiagnosticsConfig( VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig_ ) VULKAN_HPP_NOEXCEPT
{
- exclusiveScissor = exclusiveScissor_;
+ diagnosticsConfig = diagnosticsConfig_;
return *this;
}
- operator VkPhysicalDeviceExclusiveScissorFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPhysicalDeviceExclusiveScissorFeaturesNV*>( this );
+ return *reinterpret_cast<const VkPhysicalDeviceDiagnosticsConfigFeaturesNV*>( this );
}
- operator VkPhysicalDeviceExclusiveScissorFeaturesNV &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPhysicalDeviceExclusiveScissorFeaturesNV*>( this );
+ return *reinterpret_cast<VkPhysicalDeviceDiagnosticsConfigFeaturesNV*>( this );
}
- bool operator==( PhysicalDeviceExclusiveScissorFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceDiagnosticsConfigFeaturesNV const& ) const = default;
+#else
+ bool operator==( PhysicalDeviceDiagnosticsConfigFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( exclusiveScissor == rhs.exclusiveScissor );
+ && ( diagnosticsConfig == rhs.diagnosticsConfig );
}
- bool operator!=( PhysicalDeviceExclusiveScissorFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PhysicalDeviceDiagnosticsConfigFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV;
void* pNext = {};
- VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor = {};
+ VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig = {};
+
};
- static_assert( sizeof( PhysicalDeviceExclusiveScissorFeaturesNV ) == sizeof( VkPhysicalDeviceExclusiveScissorFeaturesNV ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PhysicalDeviceExclusiveScissorFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( PhysicalDeviceDiagnosticsConfigFeaturesNV ) == sizeof( VkPhysicalDeviceDiagnosticsConfigFeaturesNV ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceDiagnosticsConfigFeaturesNV>::value, "struct wrapper is not a standard layout!" );
- struct PhysicalDeviceExternalBufferInfo
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV>
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalBufferInfo( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {},
- VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {},
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , usage( usage_ )
- , handleType( handleType_ )
+ using Type = PhysicalDeviceDiagnosticsConfigFeaturesNV;
+ };
+
+ struct PhysicalDeviceDiscardRectanglePropertiesEXT
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceDiscardRectanglePropertiesEXT(uint32_t maxDiscardRectangles_ = {}) VULKAN_HPP_NOEXCEPT
+ : maxDiscardRectangles( maxDiscardRectangles_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo ) - offsetof( PhysicalDeviceExternalBufferInfo, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceDiscardRectanglePropertiesEXT( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceExternalBufferInfo( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDiscardRectanglePropertiesEXT( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceExternalBufferInfo& operator=( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo const *>(&rhs);
- return *this;
- }
-
- PhysicalDeviceExternalBufferInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDiscardRectanglePropertiesEXT & operator=( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT const *>( &rhs );
return *this;
}
- PhysicalDeviceExternalBufferInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDiscardRectanglePropertiesEXT & operator=( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- flags = flags_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceDiscardRectanglePropertiesEXT ) );
return *this;
}
- PhysicalDeviceExternalBufferInfo & setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
- {
- usage = usage_;
- return *this;
- }
- PhysicalDeviceExternalBufferInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceDiscardRectanglePropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
{
- handleType = handleType_;
- return *this;
+ return *reinterpret_cast<const VkPhysicalDeviceDiscardRectanglePropertiesEXT*>( this );
}
- operator VkPhysicalDeviceExternalBufferInfo const&() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceDiscardRectanglePropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo*>( this );
+ return *reinterpret_cast<VkPhysicalDeviceDiscardRectanglePropertiesEXT*>( this );
}
- operator VkPhysicalDeviceExternalBufferInfo &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkPhysicalDeviceExternalBufferInfo*>( this );
- }
- bool operator==( PhysicalDeviceExternalBufferInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceDiscardRectanglePropertiesEXT const& ) const = default;
+#else
+ bool operator==( PhysicalDeviceDiscardRectanglePropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( flags == rhs.flags )
- && ( usage == rhs.usage )
- && ( handleType == rhs.handleType );
+ && ( maxDiscardRectangles == rhs.maxDiscardRectangles );
}
- bool operator!=( PhysicalDeviceExternalBufferInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PhysicalDeviceDiscardRectanglePropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalBufferInfo;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::BufferCreateFlags flags = {};
- VULKAN_HPP_NAMESPACE::BufferUsageFlags usage = {};
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT;
+ void* pNext = {};
+ uint32_t maxDiscardRectangles = {};
+
};
- static_assert( sizeof( PhysicalDeviceExternalBufferInfo ) == sizeof( VkPhysicalDeviceExternalBufferInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PhysicalDeviceExternalBufferInfo>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( PhysicalDeviceDiscardRectanglePropertiesEXT ) == sizeof( VkPhysicalDeviceDiscardRectanglePropertiesEXT ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceDiscardRectanglePropertiesEXT>::value, "struct wrapper is not a standard layout!" );
- struct PhysicalDeviceExternalFenceInfo
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT>
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFenceInfo( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
- : handleType( handleType_ )
+ using Type = PhysicalDeviceDiscardRectanglePropertiesEXT;
+ };
+
+ struct PhysicalDeviceDriverProperties
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDriverProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDriverProperties(VULKAN_HPP_NAMESPACE::DriverId driverID_ = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary, std::array<char,VK_MAX_DRIVER_NAME_SIZE> const& driverName_ = {}, std::array<char,VK_MAX_DRIVER_INFO_SIZE> const& driverInfo_ = {}, VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {}) VULKAN_HPP_NOEXCEPT
+ : driverID( driverID_ ), driverName( driverName_ ), driverInfo( driverInfo_ ), conformanceVersion( conformanceVersion_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo ) - offsetof( PhysicalDeviceExternalFenceInfo, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDriverProperties( PhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceExternalFenceInfo( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDriverProperties( VkPhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceExternalFenceInfo& operator=( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDriverProperties & operator=( VkPhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties const *>( &rhs );
return *this;
}
- PhysicalDeviceExternalFenceInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDriverProperties & operator=( PhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceDriverProperties ) );
return *this;
}
- PhysicalDeviceExternalFenceInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
- {
- handleType = handleType_;
- return *this;
- }
- operator VkPhysicalDeviceExternalFenceInfo const&() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceDriverProperties const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo*>( this );
+ return *reinterpret_cast<const VkPhysicalDeviceDriverProperties*>( this );
}
- operator VkPhysicalDeviceExternalFenceInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceDriverProperties &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPhysicalDeviceExternalFenceInfo*>( this );
+ return *reinterpret_cast<VkPhysicalDeviceDriverProperties*>( this );
}
- bool operator==( PhysicalDeviceExternalFenceInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceDriverProperties const& ) const = default;
+#else
+ bool operator==( PhysicalDeviceDriverProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( handleType == rhs.handleType );
+ && ( driverID == rhs.driverID )
+ && ( driverName == rhs.driverName )
+ && ( driverInfo == rhs.driverInfo )
+ && ( conformanceVersion == rhs.conformanceVersion );
}
- bool operator!=( PhysicalDeviceExternalFenceInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PhysicalDeviceDriverProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalFenceInfo;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDriverProperties;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DriverId driverID = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary;
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_NAME_SIZE> driverName = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_INFO_SIZE> driverInfo = {};
+ VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion = {};
+
};
- static_assert( sizeof( PhysicalDeviceExternalFenceInfo ) == sizeof( VkPhysicalDeviceExternalFenceInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PhysicalDeviceExternalFenceInfo>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( PhysicalDeviceDriverProperties ) == sizeof( VkPhysicalDeviceDriverProperties ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceDriverProperties>::value, "struct wrapper is not a standard layout!" );
- struct PhysicalDeviceExternalImageFormatInfo
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceDriverProperties>
+ {
+ using Type = PhysicalDeviceDriverProperties;
+ };
+ using PhysicalDeviceDriverPropertiesKHR = PhysicalDeviceDriverProperties;
+
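Editor's note: driverName and driverInfo change from raw char arrays to ArrayWrapper1D members, which is why the operator== above compares them directly instead of calling memcmp. A hedged sketch of reading them (physicalDevice and a <cstdio> include are assumptions, not shown in the diff):

    // ArrayWrapper1D derives from std::array, so data() still yields the
    // null-terminated C string written by the driver.
    vk::PhysicalDeviceDriverProperties driverProps;
    vk::PhysicalDeviceProperties2 props2;
    props2.pNext = &driverProps;                    // chain the Vulkan 1.2 query
    physicalDevice.getProperties2( &props2 );
    std::printf( "driver: %s (%s)\n", driverProps.driverName.data(), driverProps.driverInfo.data() );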
+ struct PhysicalDeviceExclusiveScissorFeaturesNV
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
- : handleType( handleType_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ = {}) VULKAN_HPP_NOEXCEPT
+ : exclusiveScissor( exclusiveScissor_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceExclusiveScissorFeaturesNV( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo ) - offsetof( PhysicalDeviceExternalImageFormatInfo, pNext ) );
- return *this;
+ *this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceExternalImageFormatInfo( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceExclusiveScissorFeaturesNV & operator=( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV const *>( &rhs );
+ return *this;
}
- PhysicalDeviceExternalImageFormatInfo& operator=( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceExclusiveScissorFeaturesNV & operator=( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceExclusiveScissorFeaturesNV ) );
return *this;
}
- PhysicalDeviceExternalImageFormatInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceExclusiveScissorFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- PhysicalDeviceExternalImageFormatInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceExclusiveScissorFeaturesNV & setExclusiveScissor( VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ ) VULKAN_HPP_NOEXCEPT
{
- handleType = handleType_;
+ exclusiveScissor = exclusiveScissor_;
return *this;
}
- operator VkPhysicalDeviceExternalImageFormatInfo const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkPhysicalDeviceExclusiveScissorFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPhysicalDeviceExternalImageFormatInfo*>( this );
+ return *reinterpret_cast<const VkPhysicalDeviceExclusiveScissorFeaturesNV*>( this );
}
- operator VkPhysicalDeviceExternalImageFormatInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceExclusiveScissorFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPhysicalDeviceExternalImageFormatInfo*>( this );
+ return *reinterpret_cast<VkPhysicalDeviceExclusiveScissorFeaturesNV*>( this );
}
- bool operator==( PhysicalDeviceExternalImageFormatInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceExclusiveScissorFeaturesNV const& ) const = default;
+#else
+ bool operator==( PhysicalDeviceExclusiveScissorFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( handleType == rhs.handleType );
+ && ( exclusiveScissor == rhs.exclusiveScissor );
}
- bool operator!=( PhysicalDeviceExternalImageFormatInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PhysicalDeviceExclusiveScissorFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalImageFormatInfo;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor = {};
+
};
- static_assert( sizeof( PhysicalDeviceExternalImageFormatInfo ) == sizeof( VkPhysicalDeviceExternalImageFormatInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PhysicalDeviceExternalImageFormatInfo>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( PhysicalDeviceExclusiveScissorFeaturesNV ) == sizeof( VkPhysicalDeviceExclusiveScissorFeaturesNV ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceExclusiveScissorFeaturesNV>::value, "struct wrapper is not a standard layout!" );
- struct PhysicalDeviceExternalMemoryHostPropertiesEXT
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV>
+ {
+ using Type = PhysicalDeviceExclusiveScissorFeaturesNV;
+ };
+
+ struct PhysicalDeviceExtendedDynamicStateFeaturesEXT
{
- PhysicalDeviceExternalMemoryHostPropertiesEXT( VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment_ = {} ) VULKAN_HPP_NOEXCEPT
- : minImportedHostPointerAlignment( minImportedHostPointerAlignment_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicStateFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState_ = {}) VULKAN_HPP_NOEXCEPT
+ : extendedDynamicState( extendedDynamicState_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicStateFeaturesEXT( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceExtendedDynamicStateFeaturesEXT( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT ) - offsetof( PhysicalDeviceExternalMemoryHostPropertiesEXT, pNext ) );
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceExtendedDynamicStateFeaturesEXT & operator=( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicStateFeaturesEXT const *>( &rhs );
return *this;
}
- PhysicalDeviceExternalMemoryHostPropertiesEXT( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceExtendedDynamicStateFeaturesEXT & operator=( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceExtendedDynamicStateFeaturesEXT ) );
+ return *this;
}
- PhysicalDeviceExternalMemoryHostPropertiesEXT& operator=( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceExtendedDynamicStateFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT const *>(&rhs);
+ pNext = pNext_;
return *this;
}
- operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceExtendedDynamicStateFeaturesEXT & setExtendedDynamicState( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPhysicalDeviceExternalMemoryHostPropertiesEXT*>( this );
+ extendedDynamicState = extendedDynamicState_;
+ return *this;
}
- operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+
+ operator VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPhysicalDeviceExternalMemoryHostPropertiesEXT*>( this );
+ return *reinterpret_cast<const VkPhysicalDeviceExtendedDynamicStateFeaturesEXT*>( this );
}
- bool operator==( PhysicalDeviceExternalMemoryHostPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceExtendedDynamicStateFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceExtendedDynamicStateFeaturesEXT*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceExtendedDynamicStateFeaturesEXT const& ) const = default;
+#else
+ bool operator==( PhysicalDeviceExtendedDynamicStateFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( minImportedHostPointerAlignment == rhs.minImportedHostPointerAlignment );
+ && ( extendedDynamicState == rhs.extendedDynamicState );
}
- bool operator!=( PhysicalDeviceExternalMemoryHostPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PhysicalDeviceExtendedDynamicStateFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT;
void* pNext = {};
- VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState = {};
+
};
- static_assert( sizeof( PhysicalDeviceExternalMemoryHostPropertiesEXT ) == sizeof( VkPhysicalDeviceExternalMemoryHostPropertiesEXT ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PhysicalDeviceExternalMemoryHostPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( PhysicalDeviceExtendedDynamicStateFeaturesEXT ) == sizeof( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceExtendedDynamicStateFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
- struct PhysicalDeviceExternalSemaphoreInfo
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT>
+ {
+ using Type = PhysicalDeviceExtendedDynamicStateFeaturesEXT;
+ };
+
+ struct PhysicalDeviceExternalImageFormatInfo
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
- : handleType( handleType_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalImageFormatInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT
+ : handleType( handleType_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo( PhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceExternalImageFormatInfo( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo ) - offsetof( PhysicalDeviceExternalSemaphoreInfo, pNext ) );
- return *this;
+ *this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceExternalSemaphoreInfo( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceExternalImageFormatInfo & operator=( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo const *>( &rhs );
+ return *this;
}
- PhysicalDeviceExternalSemaphoreInfo& operator=( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceExternalImageFormatInfo & operator=( PhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceExternalImageFormatInfo ) );
return *this;
}
- PhysicalDeviceExternalSemaphoreInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceExternalImageFormatInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- PhysicalDeviceExternalSemaphoreInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceExternalImageFormatInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
}
- operator VkPhysicalDeviceExternalSemaphoreInfo const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkPhysicalDeviceExternalImageFormatInfo const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo*>( this );
+ return *reinterpret_cast<const VkPhysicalDeviceExternalImageFormatInfo*>( this );
}
- operator VkPhysicalDeviceExternalSemaphoreInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceExternalImageFormatInfo &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPhysicalDeviceExternalSemaphoreInfo*>( this );
+ return *reinterpret_cast<VkPhysicalDeviceExternalImageFormatInfo*>( this );
}
- bool operator==( PhysicalDeviceExternalSemaphoreInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceExternalImageFormatInfo const& ) const = default;
+#else
+ bool operator==( PhysicalDeviceExternalImageFormatInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( handleType == rhs.handleType );
}
- bool operator!=( PhysicalDeviceExternalSemaphoreInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PhysicalDeviceExternalImageFormatInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalSemaphoreInfo;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalImageFormatInfo;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
+
};
- static_assert( sizeof( PhysicalDeviceExternalSemaphoreInfo ) == sizeof( VkPhysicalDeviceExternalSemaphoreInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PhysicalDeviceExternalSemaphoreInfo>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( PhysicalDeviceExternalImageFormatInfo ) == sizeof( VkPhysicalDeviceExternalImageFormatInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceExternalImageFormatInfo>::value, "struct wrapper is not a standard layout!" );
- struct PhysicalDeviceFeatures2
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceExternalImageFormatInfo>
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features_ = {} ) VULKAN_HPP_NOEXCEPT
- : features( features_ )
+ using Type = PhysicalDeviceExternalImageFormatInfo;
+ };
+ using PhysicalDeviceExternalImageFormatInfoKHR = PhysicalDeviceExternalImageFormatInfo;
+
+ struct PhysicalDeviceExternalMemoryHostPropertiesEXT
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryHostPropertiesEXT(VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment_ = {}) VULKAN_HPP_NOEXCEPT
+ : minImportedHostPointerAlignment( minImportedHostPointerAlignment_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 ) - offsetof( PhysicalDeviceFeatures2, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryHostPropertiesEXT( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceFeatures2( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceExternalMemoryHostPropertiesEXT( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceFeatures2& operator=( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceExternalMemoryHostPropertiesEXT & operator=( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT const *>( &rhs );
return *this;
}
- PhysicalDeviceFeatures2 & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceExternalMemoryHostPropertiesEXT & operator=( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceExternalMemoryHostPropertiesEXT ) );
return *this;
}
- PhysicalDeviceFeatures2 & setFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features_ ) VULKAN_HPP_NOEXCEPT
- {
- features = features_;
- return *this;
- }
- operator VkPhysicalDeviceFeatures2 const&() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPhysicalDeviceFeatures2*>( this );
+ return *reinterpret_cast<const VkPhysicalDeviceExternalMemoryHostPropertiesEXT*>( this );
}
- operator VkPhysicalDeviceFeatures2 &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPhysicalDeviceFeatures2*>( this );
+ return *reinterpret_cast<VkPhysicalDeviceExternalMemoryHostPropertiesEXT*>( this );
}
- bool operator==( PhysicalDeviceFeatures2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceExternalMemoryHostPropertiesEXT const& ) const = default;
+#else
+ bool operator==( PhysicalDeviceExternalMemoryHostPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( features == rhs.features );
+ && ( minImportedHostPointerAlignment == rhs.minImportedHostPointerAlignment );
}
- bool operator!=( PhysicalDeviceFeatures2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PhysicalDeviceExternalMemoryHostPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFeatures2;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT;
void* pNext = {};
- VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment = {};
+
+ };
+ static_assert( sizeof( PhysicalDeviceExternalMemoryHostPropertiesEXT ) == sizeof( VkPhysicalDeviceExternalMemoryHostPropertiesEXT ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceExternalMemoryHostPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT>
+ {
+ using Type = PhysicalDeviceExternalMemoryHostPropertiesEXT;
};
- static_assert( sizeof( PhysicalDeviceFeatures2 ) == sizeof( VkPhysicalDeviceFeatures2 ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PhysicalDeviceFeatures2>::value, "struct wrapper is not a standard layout!" );
struct PhysicalDeviceFloatControlsProperties
{
- PhysicalDeviceFloatControlsProperties( VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly,
- VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly,
- VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {} ) VULKAN_HPP_NOEXCEPT
- : denormBehaviorIndependence( denormBehaviorIndependence_ )
- , roundingModeIndependence( roundingModeIndependence_ )
- , shaderSignedZeroInfNanPreserveFloat16( shaderSignedZeroInfNanPreserveFloat16_ )
- , shaderSignedZeroInfNanPreserveFloat32( shaderSignedZeroInfNanPreserveFloat32_ )
- , shaderSignedZeroInfNanPreserveFloat64( shaderSignedZeroInfNanPreserveFloat64_ )
- , shaderDenormPreserveFloat16( shaderDenormPreserveFloat16_ )
- , shaderDenormPreserveFloat32( shaderDenormPreserveFloat32_ )
- , shaderDenormPreserveFloat64( shaderDenormPreserveFloat64_ )
- , shaderDenormFlushToZeroFloat16( shaderDenormFlushToZeroFloat16_ )
- , shaderDenormFlushToZeroFloat32( shaderDenormFlushToZeroFloat32_ )
- , shaderDenormFlushToZeroFloat64( shaderDenormFlushToZeroFloat64_ )
- , shaderRoundingModeRTEFloat16( shaderRoundingModeRTEFloat16_ )
- , shaderRoundingModeRTEFloat32( shaderRoundingModeRTEFloat32_ )
- , shaderRoundingModeRTEFloat64( shaderRoundingModeRTEFloat64_ )
- , shaderRoundingModeRTZFloat16( shaderRoundingModeRTZFloat16_ )
- , shaderRoundingModeRTZFloat32( shaderRoundingModeRTZFloat32_ )
- , shaderRoundingModeRTZFloat64( shaderRoundingModeRTZFloat64_ )
- {}
-
- VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties ) - offsetof( PhysicalDeviceFloatControlsProperties, pNext ) );
- return *this;
- }
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFloatControlsProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceFloatControlsProperties(VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {}) VULKAN_HPP_NOEXCEPT
+ : denormBehaviorIndependence( denormBehaviorIndependence_ ), roundingModeIndependence( roundingModeIndependence_ ), shaderSignedZeroInfNanPreserveFloat16( shaderSignedZeroInfNanPreserveFloat16_ ), shaderSignedZeroInfNanPreserveFloat32( shaderSignedZeroInfNanPreserveFloat32_ ), shaderSignedZeroInfNanPreserveFloat64( shaderSignedZeroInfNanPreserveFloat64_ ), shaderDenormPreserveFloat16( shaderDenormPreserveFloat16_ ), shaderDenormPreserveFloat32( shaderDenormPreserveFloat32_ ), shaderDenormPreserveFloat64( shaderDenormPreserveFloat64_ ), shaderDenormFlushToZeroFloat16( shaderDenormFlushToZeroFloat16_ ), shaderDenormFlushToZeroFloat32( shaderDenormFlushToZeroFloat32_ ), shaderDenormFlushToZeroFloat64( shaderDenormFlushToZeroFloat64_ ), shaderRoundingModeRTEFloat16( shaderRoundingModeRTEFloat16_ ), shaderRoundingModeRTEFloat32( shaderRoundingModeRTEFloat32_ ), shaderRoundingModeRTEFloat64( shaderRoundingModeRTEFloat64_ ), shaderRoundingModeRTZFloat16( shaderRoundingModeRTZFloat16_ ), shaderRoundingModeRTZFloat32( shaderRoundingModeRTZFloat32_ ), shaderRoundingModeRTZFloat64( shaderRoundingModeRTZFloat64_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceFloatControlsProperties( PhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceFloatControlsProperties( VkPhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceFloatControlsProperties& operator=( VkPhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFloatControlsProperties & operator=( VkPhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties const *>( &rhs );
return *this;
}
+ PhysicalDeviceFloatControlsProperties & operator=( PhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceFloatControlsProperties ) );
+ return *this;
+ }
+
+
operator VkPhysicalDeviceFloatControlsProperties const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceFloatControlsProperties*>( this );
@@ -44225,6 +63745,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceFloatControlsProperties*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceFloatControlsProperties const& ) const = default;
+#else
bool operator==( PhysicalDeviceFloatControlsProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -44252,6 +63776,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFloatControlsProperties;
@@ -44273,37 +63800,241 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64 = {};
+
};
static_assert( sizeof( PhysicalDeviceFloatControlsProperties ) == sizeof( VkPhysicalDeviceFloatControlsProperties ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceFloatControlsProperties>::value, "struct wrapper is not a standard layout!" );
- struct PhysicalDeviceFragmentDensityMapFeaturesEXT
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceFloatControlsProperties>
+ {
+ using Type = PhysicalDeviceFloatControlsProperties;
+ };
+ using PhysicalDeviceFloatControlsPropertiesKHR = PhysicalDeviceFloatControlsProperties;
+
+ struct PhysicalDeviceFragmentDensityMap2FeaturesEXT
{
- PhysicalDeviceFragmentDensityMapFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages_ = {} ) VULKAN_HPP_NOEXCEPT
- : fragmentDensityMap( fragmentDensityMap_ )
- , fragmentDensityMapDynamic( fragmentDensityMapDynamic_ )
- , fragmentDensityMapNonSubsampledImages( fragmentDensityMapNonSubsampledImages_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2FeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred_ = {}) VULKAN_HPP_NOEXCEPT
+ : fragmentDensityMapDeferred( fragmentDensityMapDeferred_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2FeaturesEXT( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceFragmentDensityMap2FeaturesEXT( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceFragmentDensityMap2FeaturesEXT & operator=( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2FeaturesEXT const *>( &rhs );
+ return *this;
+ }
+
+ PhysicalDeviceFragmentDensityMap2FeaturesEXT & operator=( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceFragmentDensityMap2FeaturesEXT ) );
+ return *this;
+ }
+
+ PhysicalDeviceFragmentDensityMap2FeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PhysicalDeviceFragmentDensityMap2FeaturesEXT & setFragmentDensityMapDeferred( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred_ ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT ) - offsetof( PhysicalDeviceFragmentDensityMapFeaturesEXT, pNext ) );
+ fragmentDensityMapDeferred = fragmentDensityMapDeferred_;
return *this;
}
+
+ operator VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMap2FeaturesEXT*>( this );
+ }
+
+ operator VkPhysicalDeviceFragmentDensityMap2FeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMap2FeaturesEXT*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceFragmentDensityMap2FeaturesEXT const& ) const = default;
+#else
+ bool operator==( PhysicalDeviceFragmentDensityMap2FeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( fragmentDensityMapDeferred == rhs.fragmentDensityMapDeferred );
+ }
+
+ bool operator!=( PhysicalDeviceFragmentDensityMap2FeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred = {};
+
+ };
+ static_assert( sizeof( PhysicalDeviceFragmentDensityMap2FeaturesEXT ) == sizeof( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceFragmentDensityMap2FeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT>
+ {
+ using Type = PhysicalDeviceFragmentDensityMap2FeaturesEXT;
+ };
+
+ struct PhysicalDeviceFragmentDensityMap2PropertiesEXT
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2PropertiesEXT(VULKAN_HPP_NAMESPACE::Bool32 subsampledLoads_ = {}, VULKAN_HPP_NAMESPACE::Bool32 subsampledCoarseReconstructionEarlyAccess_ = {}, uint32_t maxSubsampledArrayLayers_ = {}, uint32_t maxDescriptorSetSubsampledSamplers_ = {}) VULKAN_HPP_NOEXCEPT
+ : subsampledLoads( subsampledLoads_ ), subsampledCoarseReconstructionEarlyAccess( subsampledCoarseReconstructionEarlyAccess_ ), maxSubsampledArrayLayers( maxSubsampledArrayLayers_ ), maxDescriptorSetSubsampledSamplers( maxDescriptorSetSubsampledSamplers_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2PropertiesEXT( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceFragmentDensityMap2PropertiesEXT( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceFragmentDensityMap2PropertiesEXT & operator=( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2PropertiesEXT const *>( &rhs );
+ return *this;
+ }
+
+ PhysicalDeviceFragmentDensityMap2PropertiesEXT & operator=( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceFragmentDensityMap2PropertiesEXT ) );
+ return *this;
+ }
+
+
+ operator VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMap2PropertiesEXT*>( this );
+ }
+
+ operator VkPhysicalDeviceFragmentDensityMap2PropertiesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMap2PropertiesEXT*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceFragmentDensityMap2PropertiesEXT const& ) const = default;
+#else
+ bool operator==( PhysicalDeviceFragmentDensityMap2PropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( subsampledLoads == rhs.subsampledLoads )
+ && ( subsampledCoarseReconstructionEarlyAccess == rhs.subsampledCoarseReconstructionEarlyAccess )
+ && ( maxSubsampledArrayLayers == rhs.maxSubsampledArrayLayers )
+ && ( maxDescriptorSetSubsampledSamplers == rhs.maxDescriptorSetSubsampledSamplers );
+ }
+
+ bool operator!=( PhysicalDeviceFragmentDensityMap2PropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 subsampledLoads = {};
+ VULKAN_HPP_NAMESPACE::Bool32 subsampledCoarseReconstructionEarlyAccess = {};
+ uint32_t maxSubsampledArrayLayers = {};
+ uint32_t maxDescriptorSetSubsampledSamplers = {};
+
+ };
+ static_assert( sizeof( PhysicalDeviceFragmentDensityMap2PropertiesEXT ) == sizeof( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceFragmentDensityMap2PropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT>
+ {
+ using Type = PhysicalDeviceFragmentDensityMap2PropertiesEXT;
+ };
+
+ struct PhysicalDeviceFragmentDensityMapFeaturesEXT
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages_ = {}) VULKAN_HPP_NOEXCEPT
+ : fragmentDensityMap( fragmentDensityMap_ ), fragmentDensityMapDynamic( fragmentDensityMapDynamic_ ), fragmentDensityMapNonSubsampledImages( fragmentDensityMapNonSubsampledImages_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapFeaturesEXT( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
PhysicalDeviceFragmentDensityMapFeaturesEXT( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceFragmentDensityMapFeaturesEXT & operator=( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT const *>( &rhs );
+ return *this;
+ }
+
+ PhysicalDeviceFragmentDensityMapFeaturesEXT & operator=( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceFragmentDensityMapFeaturesEXT ) );
+ return *this;
+ }
+
+ PhysicalDeviceFragmentDensityMapFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PhysicalDeviceFragmentDensityMapFeaturesEXT & setFragmentDensityMap( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap_ ) VULKAN_HPP_NOEXCEPT
+ {
+ fragmentDensityMap = fragmentDensityMap_;
+ return *this;
+ }
+
+ PhysicalDeviceFragmentDensityMapFeaturesEXT & setFragmentDensityMapDynamic( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic_ ) VULKAN_HPP_NOEXCEPT
+ {
+ fragmentDensityMapDynamic = fragmentDensityMapDynamic_;
+ return *this;
+ }
- PhysicalDeviceFragmentDensityMapFeaturesEXT& operator=( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFragmentDensityMapFeaturesEXT & setFragmentDensityMapNonSubsampledImages( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages_ ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT const *>(&rhs);
+ fragmentDensityMapNonSubsampledImages = fragmentDensityMapNonSubsampledImages_;
return *this;
}
+
operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapFeaturesEXT*>( this );
@@ -44314,6 +64045,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMapFeaturesEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceFragmentDensityMapFeaturesEXT const& ) const = default;
+#else
bool operator==( PhysicalDeviceFragmentDensityMapFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -44327,6 +64062,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT;
@@ -44334,37 +64072,48 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap = {};
VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic = {};
VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages = {};
+
};
static_assert( sizeof( PhysicalDeviceFragmentDensityMapFeaturesEXT ) == sizeof( VkPhysicalDeviceFragmentDensityMapFeaturesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceFragmentDensityMapFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT>
+ {
+ using Type = PhysicalDeviceFragmentDensityMapFeaturesEXT;
+ };
+
struct PhysicalDeviceFragmentDensityMapPropertiesEXT
{
- PhysicalDeviceFragmentDensityMapPropertiesEXT( VULKAN_HPP_NAMESPACE::Extent2D minFragmentDensityTexelSize_ = {},
- VULKAN_HPP_NAMESPACE::Extent2D maxFragmentDensityTexelSize_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityInvocations_ = {} ) VULKAN_HPP_NOEXCEPT
- : minFragmentDensityTexelSize( minFragmentDensityTexelSize_ )
- , maxFragmentDensityTexelSize( maxFragmentDensityTexelSize_ )
- , fragmentDensityInvocations( fragmentDensityInvocations_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapPropertiesEXT(VULKAN_HPP_NAMESPACE::Extent2D minFragmentDensityTexelSize_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxFragmentDensityTexelSize_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityInvocations_ = {}) VULKAN_HPP_NOEXCEPT
+ : minFragmentDensityTexelSize( minFragmentDensityTexelSize_ ), maxFragmentDensityTexelSize( maxFragmentDensityTexelSize_ ), fragmentDensityInvocations( fragmentDensityInvocations_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT ) - offsetof( PhysicalDeviceFragmentDensityMapPropertiesEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapPropertiesEXT( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceFragmentDensityMapPropertiesEXT( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceFragmentDensityMapPropertiesEXT & operator=( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT const *>( &rhs );
+ return *this;
+ }
- PhysicalDeviceFragmentDensityMapPropertiesEXT& operator=( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFragmentDensityMapPropertiesEXT & operator=( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceFragmentDensityMapPropertiesEXT ) );
return *this;
}
+
operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapPropertiesEXT*>( this );
@@ -44375,6 +64124,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMapPropertiesEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceFragmentDensityMapPropertiesEXT const& ) const = default;
+#else
bool operator==( PhysicalDeviceFragmentDensityMapPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -44388,6 +64141,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT;
@@ -44395,30 +64151,44 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Extent2D minFragmentDensityTexelSize = {};
VULKAN_HPP_NAMESPACE::Extent2D maxFragmentDensityTexelSize = {};
VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityInvocations = {};
+
};
static_assert( sizeof( PhysicalDeviceFragmentDensityMapPropertiesEXT ) == sizeof( VkPhysicalDeviceFragmentDensityMapPropertiesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceFragmentDensityMapPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT>
+ {
+ using Type = PhysicalDeviceFragmentDensityMapPropertiesEXT;
+ };
+
struct PhysicalDeviceFragmentShaderBarycentricFeaturesNV
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_ = {} ) VULKAN_HPP_NOEXCEPT
- : fragmentShaderBarycentric( fragmentShaderBarycentric_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_ = {}) VULKAN_HPP_NOEXCEPT
+ : fragmentShaderBarycentric( fragmentShaderBarycentric_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesNV ) - offsetof( PhysicalDeviceFragmentShaderBarycentricFeaturesNV, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricFeaturesNV( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceFragmentShaderBarycentricFeaturesNV( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceFragmentShaderBarycentricFeaturesNV& operator=( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFragmentShaderBarycentricFeaturesNV & operator=( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesNV const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesNV const *>( &rhs );
+ return *this;
+ }
+
+ PhysicalDeviceFragmentShaderBarycentricFeaturesNV & operator=( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceFragmentShaderBarycentricFeaturesNV ) );
return *this;
}
@@ -44434,6 +64204,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV*>( this );
@@ -44444,6 +64215,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const& ) const = default;
+#else
bool operator==( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -44455,39 +64230,52 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric = {};
+
};
static_assert( sizeof( PhysicalDeviceFragmentShaderBarycentricFeaturesNV ) == sizeof( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceFragmentShaderBarycentricFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV>
+ {
+ using Type = PhysicalDeviceFragmentShaderBarycentricFeaturesNV;
+ };
+
struct PhysicalDeviceFragmentShaderInterlockFeaturesEXT
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderInterlockFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ = {} ) VULKAN_HPP_NOEXCEPT
- : fragmentShaderSampleInterlock( fragmentShaderSampleInterlock_ )
- , fragmentShaderPixelInterlock( fragmentShaderPixelInterlock_ )
- , fragmentShaderShadingRateInterlock( fragmentShaderShadingRateInterlock_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderInterlockFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ = {}) VULKAN_HPP_NOEXCEPT
+ : fragmentShaderSampleInterlock( fragmentShaderSampleInterlock_ ), fragmentShaderPixelInterlock( fragmentShaderPixelInterlock_ ), fragmentShaderShadingRateInterlock( fragmentShaderShadingRateInterlock_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT ) - offsetof( PhysicalDeviceFragmentShaderInterlockFeaturesEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderInterlockFeaturesEXT( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceFragmentShaderInterlockFeaturesEXT( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceFragmentShaderInterlockFeaturesEXT & operator=( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT const *>( &rhs );
+ return *this;
+ }
- PhysicalDeviceFragmentShaderInterlockFeaturesEXT& operator=( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFragmentShaderInterlockFeaturesEXT & operator=( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceFragmentShaderInterlockFeaturesEXT ) );
return *this;
}
@@ -44515,6 +64303,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT*>( this );
@@ -44525,6 +64314,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const& ) const = default;
+#else
bool operator==( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -44538,6 +64331,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT;
@@ -44545,39 +64341,48 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock = {};
VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock = {};
VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock = {};
+
};
static_assert( sizeof( PhysicalDeviceFragmentShaderInterlockFeaturesEXT ) == sizeof( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceFragmentShaderInterlockFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT>
+ {
+ using Type = PhysicalDeviceFragmentShaderInterlockFeaturesEXT;
+ };
+
struct PhysicalDeviceGroupProperties
{
- PhysicalDeviceGroupProperties( uint32_t physicalDeviceCount_ = {},
- std::array<VULKAN_HPP_NAMESPACE::PhysicalDevice,VK_MAX_DEVICE_GROUP_SIZE> const& physicalDevices_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation_ = {} ) VULKAN_HPP_NOEXCEPT
- : physicalDeviceCount( physicalDeviceCount_ )
- , physicalDevices{}
- , subsetAllocation( subsetAllocation_ )
- {
- VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<VULKAN_HPP_NAMESPACE::PhysicalDevice,VK_MAX_DEVICE_GROUP_SIZE,VK_MAX_DEVICE_GROUP_SIZE>::copy( physicalDevices, physicalDevices_ );
- }
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceGroupProperties;
- VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties ) - offsetof( PhysicalDeviceGroupProperties, pNext ) );
- return *this;
- }
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGroupProperties(uint32_t physicalDeviceCount_ = {}, std::array<VULKAN_HPP_NAMESPACE::PhysicalDevice,VK_MAX_DEVICE_GROUP_SIZE> const& physicalDevices_ = {}, VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation_ = {}) VULKAN_HPP_NOEXCEPT
+ : physicalDeviceCount( physicalDeviceCount_ ), physicalDevices( physicalDevices_ ), subsetAllocation( subsetAllocation_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGroupProperties( PhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceGroupProperties( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceGroupProperties & operator=( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties const *>( &rhs );
+ return *this;
+ }
- PhysicalDeviceGroupProperties& operator=( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceGroupProperties & operator=( PhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceGroupProperties ) );
return *this;
}
+
operator VkPhysicalDeviceGroupProperties const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceGroupProperties*>( this );
@@ -44588,12 +64393,16 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceGroupProperties*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceGroupProperties const& ) const = default;
+#else
bool operator==( PhysicalDeviceGroupProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( physicalDeviceCount == rhs.physicalDeviceCount )
- && ( memcmp( physicalDevices, rhs.physicalDevices, std::min<uint32_t>( VK_MAX_DEVICE_GROUP_SIZE, physicalDeviceCount ) * sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice ) ) == 0 )
+ && ( physicalDevices == rhs.physicalDevices )
&& ( subsetAllocation == rhs.subsetAllocation );
}
@@ -44601,37 +64410,55 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGroupProperties;
void* pNext = {};
uint32_t physicalDeviceCount = {};
- VULKAN_HPP_NAMESPACE::PhysicalDevice physicalDevices[VK_MAX_DEVICE_GROUP_SIZE] = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::PhysicalDevice, VK_MAX_DEVICE_GROUP_SIZE> physicalDevices = {};
VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation = {};
+
};
static_assert( sizeof( PhysicalDeviceGroupProperties ) == sizeof( VkPhysicalDeviceGroupProperties ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceGroupProperties>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceGroupProperties>
+ {
+ using Type = PhysicalDeviceGroupProperties;
+ };
+ using PhysicalDeviceGroupPropertiesKHR = PhysicalDeviceGroupProperties;
+
struct PhysicalDeviceHostQueryResetFeatures
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceHostQueryResetFeatures( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {} ) VULKAN_HPP_NOEXCEPT
- : hostQueryReset( hostQueryReset_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceHostQueryResetFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceHostQueryResetFeatures(VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {}) VULKAN_HPP_NOEXCEPT
+ : hostQueryReset( hostQueryReset_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures ) - offsetof( PhysicalDeviceHostQueryResetFeatures, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceHostQueryResetFeatures( PhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceHostQueryResetFeatures( VkPhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceHostQueryResetFeatures & operator=( VkPhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures const *>( &rhs );
+ return *this;
+ }
- PhysicalDeviceHostQueryResetFeatures& operator=( VkPhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceHostQueryResetFeatures & operator=( PhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceHostQueryResetFeatures ) );
return *this;
}
@@ -44647,6 +64474,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceHostQueryResetFeatures const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceHostQueryResetFeatures*>( this );
@@ -44657,6 +64485,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceHostQueryResetFeatures*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceHostQueryResetFeatures const& ) const = default;
+#else
bool operator==( PhysicalDeviceHostQueryResetFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -44668,50 +64500,57 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceHostQueryResetFeatures;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset = {};
+
};
static_assert( sizeof( PhysicalDeviceHostQueryResetFeatures ) == sizeof( VkPhysicalDeviceHostQueryResetFeatures ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceHostQueryResetFeatures>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceHostQueryResetFeatures>
+ {
+ using Type = PhysicalDeviceHostQueryResetFeatures;
+ };
+ using PhysicalDeviceHostQueryResetFeaturesEXT = PhysicalDeviceHostQueryResetFeatures;
+
struct PhysicalDeviceIDProperties
{
- PhysicalDeviceIDProperties( std::array<uint8_t,VK_UUID_SIZE> const& deviceUUID_ = {},
- std::array<uint8_t,VK_UUID_SIZE> const& driverUUID_ = {},
- std::array<uint8_t,VK_LUID_SIZE> const& deviceLUID_ = {},
- uint32_t deviceNodeMask_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ = {} ) VULKAN_HPP_NOEXCEPT
- : deviceUUID{}
- , driverUUID{}
- , deviceLUID{}
- , deviceNodeMask( deviceNodeMask_ )
- , deviceLUIDValid( deviceLUIDValid_ )
- {
- VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint8_t,VK_UUID_SIZE,VK_UUID_SIZE>::copy( deviceUUID, deviceUUID_ );
- VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint8_t,VK_UUID_SIZE,VK_UUID_SIZE>::copy( driverUUID, driverUUID_ );
- VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint8_t,VK_LUID_SIZE,VK_LUID_SIZE>::copy( deviceLUID, deviceLUID_ );
- }
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceIdProperties;
- VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties ) - offsetof( PhysicalDeviceIDProperties, pNext ) );
- return *this;
- }
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIDProperties(std::array<uint8_t,VK_UUID_SIZE> const& deviceUUID_ = {}, std::array<uint8_t,VK_UUID_SIZE> const& driverUUID_ = {}, std::array<uint8_t,VK_LUID_SIZE> const& deviceLUID_ = {}, uint32_t deviceNodeMask_ = {}, VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ = {}) VULKAN_HPP_NOEXCEPT
+ : deviceUUID( deviceUUID_ ), driverUUID( driverUUID_ ), deviceLUID( deviceLUID_ ), deviceNodeMask( deviceNodeMask_ ), deviceLUIDValid( deviceLUIDValid_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIDProperties( PhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceIDProperties( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceIDProperties& operator=( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceIDProperties & operator=( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties const *>( &rhs );
return *this;
}
+ PhysicalDeviceIDProperties & operator=( PhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceIDProperties ) );
+ return *this;
+ }
+
+
operator VkPhysicalDeviceIDProperties const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceIDProperties*>( this );
@@ -44722,13 +64561,17 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceIDProperties*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceIDProperties const& ) const = default;
+#else
bool operator==( PhysicalDeviceIDProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( memcmp( deviceUUID, rhs.deviceUUID, VK_UUID_SIZE * sizeof( uint8_t ) ) == 0 )
- && ( memcmp( driverUUID, rhs.driverUUID, VK_UUID_SIZE * sizeof( uint8_t ) ) == 0 )
- && ( memcmp( deviceLUID, rhs.deviceLUID, VK_LUID_SIZE * sizeof( uint8_t ) ) == 0 )
+ && ( deviceUUID == rhs.deviceUUID )
+ && ( driverUUID == rhs.driverUUID )
+ && ( deviceLUID == rhs.deviceLUID )
&& ( deviceNodeMask == rhs.deviceNodeMask )
&& ( deviceLUIDValid == rhs.deviceLUIDValid );
}
@@ -44737,45 +64580,63 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceIdProperties;
void* pNext = {};
- uint8_t deviceUUID[VK_UUID_SIZE] = {};
- uint8_t driverUUID[VK_UUID_SIZE] = {};
- uint8_t deviceLUID[VK_LUID_SIZE] = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> deviceUUID = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> driverUUID = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_LUID_SIZE> deviceLUID = {};
uint32_t deviceNodeMask = {};
VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid = {};
+
};
static_assert( sizeof( PhysicalDeviceIDProperties ) == sizeof( VkPhysicalDeviceIDProperties ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceIDProperties>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceIdProperties>
+ {
+ using Type = PhysicalDeviceIDProperties;
+ };
+ using PhysicalDeviceIDPropertiesKHR = PhysicalDeviceIDProperties;
+
struct PhysicalDeviceImageDrmFormatModifierInfoEXT
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceImageDrmFormatModifierInfoEXT( uint64_t drmFormatModifier_ = {},
- VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive,
- uint32_t queueFamilyIndexCount_ = {},
- const uint32_t* pQueueFamilyIndices_ = {} ) VULKAN_HPP_NOEXCEPT
- : drmFormatModifier( drmFormatModifier_ )
- , sharingMode( sharingMode_ )
- , queueFamilyIndexCount( queueFamilyIndexCount_ )
- , pQueueFamilyIndices( pQueueFamilyIndices_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceImageDrmFormatModifierInfoEXT(uint64_t drmFormatModifier_ = {}, VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = {}, const uint32_t* pQueueFamilyIndices_ = {}) VULKAN_HPP_NOEXCEPT
+ : drmFormatModifier( drmFormatModifier_ ), sharingMode( sharingMode_ ), queueFamilyIndexCount( queueFamilyIndexCount_ ), pQueueFamilyIndices( pQueueFamilyIndices_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT ) - offsetof( PhysicalDeviceImageDrmFormatModifierInfoEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceImageDrmFormatModifierInfoEXT( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceImageDrmFormatModifierInfoEXT( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- PhysicalDeviceImageDrmFormatModifierInfoEXT& operator=( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ PhysicalDeviceImageDrmFormatModifierInfoEXT( uint64_t drmFormatModifier_, VULKAN_HPP_NAMESPACE::SharingMode sharingMode_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ )
+ : drmFormatModifier( drmFormatModifier_ ), sharingMode( sharingMode_ ), queueFamilyIndexCount( static_cast<uint32_t>( queueFamilyIndices_.size() ) ), pQueueFamilyIndices( queueFamilyIndices_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceImageDrmFormatModifierInfoEXT & operator=( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+ PhysicalDeviceImageDrmFormatModifierInfoEXT & operator=( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceImageDrmFormatModifierInfoEXT ) );
return *this;
}
@@ -44809,6 +64670,16 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ PhysicalDeviceImageDrmFormatModifierInfoEXT & setQueueFamilyIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
+ {
+ queueFamilyIndexCount = static_cast<uint32_t>( queueFamilyIndices_.size() );
+ pQueueFamilyIndices = queueFamilyIndices_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceImageDrmFormatModifierInfoEXT*>( this );
@@ -44819,6 +64690,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceImageDrmFormatModifierInfoEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceImageDrmFormatModifierInfoEXT const& ) const = default;
+#else
bool operator==( PhysicalDeviceImageDrmFormatModifierInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -44833,6 +64708,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT;
@@ -44841,135 +64719,131 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
uint32_t queueFamilyIndexCount = {};
const uint32_t* pQueueFamilyIndices = {};
+
};
static_assert( sizeof( PhysicalDeviceImageDrmFormatModifierInfoEXT ) == sizeof( VkPhysicalDeviceImageDrmFormatModifierInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceImageDrmFormatModifierInfoEXT>::value, "struct wrapper is not a standard layout!" );
- struct PhysicalDeviceImageFormatInfo2
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT>
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceImageFormatInfo2( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
- VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D,
- VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal,
- VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {},
- VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
- : format( format_ )
- , type( type_ )
- , tiling( tiling_ )
- , usage( usage_ )
- , flags( flags_ )
+ using Type = PhysicalDeviceImageDrmFormatModifierInfoEXT;
+ };
+
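
Illustrative sketch (not part of this patch; assumes enhanced mode, i.e. VULKAN_HPP_DISABLE_ENHANCED_MODE is not defined, and <vector> is included): the new ArrayProxyNoTemporaries overload lets the queue family list come from a named container, filling queueFamilyIndexCount and pQueueFamilyIndices together. The families vector and the modifier value below are hypothetical.

    std::vector<uint32_t> families = { 0, 1 };                  // must outlive 'info' (no temporaries allowed)
    vk::PhysicalDeviceImageDrmFormatModifierInfoEXT info;
    info.drmFormatModifier = 0;                                 // some DRM format modifier value
    info.sharingMode       = vk::SharingMode::eConcurrent;
    info.setQueueFamilyIndices( families );                     // sets both count and pointer from the proxy
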
+ struct PhysicalDeviceImageRobustnessFeaturesEXT
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageRobustnessFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceImageRobustnessFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ = {}) VULKAN_HPP_NOEXCEPT
+ : robustImageAccess( robustImageAccess_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 ) - offsetof( PhysicalDeviceImageFormatInfo2, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceImageRobustnessFeaturesEXT( PhysicalDeviceImageRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceImageFormatInfo2( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceImageRobustnessFeaturesEXT( VkPhysicalDeviceImageRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceImageFormatInfo2& operator=( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceImageRobustnessFeaturesEXT & operator=( VkPhysicalDeviceImageRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageRobustnessFeaturesEXT const *>( &rhs );
return *this;
}
- PhysicalDeviceImageFormatInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceImageRobustnessFeaturesEXT & operator=( PhysicalDeviceImageRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceImageRobustnessFeaturesEXT ) );
return *this;
}
- PhysicalDeviceImageFormatInfo2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceImageRobustnessFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- format = format_;
+ pNext = pNext_;
return *this;
}
- PhysicalDeviceImageFormatInfo2 & setType( VULKAN_HPP_NAMESPACE::ImageType type_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceImageRobustnessFeaturesEXT & setRobustImageAccess( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ ) VULKAN_HPP_NOEXCEPT
{
- type = type_;
+ robustImageAccess = robustImageAccess_;
return *this;
}
- PhysicalDeviceImageFormatInfo2 & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT
- {
- tiling = tiling_;
- return *this;
- }
- PhysicalDeviceImageFormatInfo2 & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceImageRobustnessFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
{
- usage = usage_;
- return *this;
+ return *reinterpret_cast<const VkPhysicalDeviceImageRobustnessFeaturesEXT*>( this );
}
- PhysicalDeviceImageFormatInfo2 & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceImageRobustnessFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
- flags = flags_;
- return *this;
+ return *reinterpret_cast<VkPhysicalDeviceImageRobustnessFeaturesEXT*>( this );
}
- operator VkPhysicalDeviceImageFormatInfo2 const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( this );
- }
- operator VkPhysicalDeviceImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkPhysicalDeviceImageFormatInfo2*>( this );
- }
-
- bool operator==( PhysicalDeviceImageFormatInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceImageRobustnessFeaturesEXT const& ) const = default;
+#else
+ bool operator==( PhysicalDeviceImageRobustnessFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( format == rhs.format )
- && ( type == rhs.type )
- && ( tiling == rhs.tiling )
- && ( usage == rhs.usage )
- && ( flags == rhs.flags );
+ && ( robustImageAccess == rhs.robustImageAccess );
}
- bool operator!=( PhysicalDeviceImageFormatInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PhysicalDeviceImageRobustnessFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageFormatInfo2;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
- VULKAN_HPP_NAMESPACE::ImageType type = VULKAN_HPP_NAMESPACE::ImageType::e1D;
- VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal;
- VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
- VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {};
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageRobustnessFeaturesEXT;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess = {};
+
+ };
+ static_assert( sizeof( PhysicalDeviceImageRobustnessFeaturesEXT ) == sizeof( VkPhysicalDeviceImageRobustnessFeaturesEXT ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceImageRobustnessFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceImageRobustnessFeaturesEXT>
+ {
+ using Type = PhysicalDeviceImageRobustnessFeaturesEXT;
};
- static_assert( sizeof( PhysicalDeviceImageFormatInfo2 ) == sizeof( VkPhysicalDeviceImageFormatInfo2 ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PhysicalDeviceImageFormatInfo2>::value, "struct wrapper is not a standard layout!" );
struct PhysicalDeviceImageViewImageFormatInfoEXT
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewImageFormatInfoEXT( VULKAN_HPP_NAMESPACE::ImageViewType imageViewType_ = VULKAN_HPP_NAMESPACE::ImageViewType::e1D ) VULKAN_HPP_NOEXCEPT
- : imageViewType( imageViewType_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewImageFormatInfoEXT(VULKAN_HPP_NAMESPACE::ImageViewType imageViewType_ = VULKAN_HPP_NAMESPACE::ImageViewType::e1D) VULKAN_HPP_NOEXCEPT
+ : imageViewType( imageViewType_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT ) - offsetof( PhysicalDeviceImageViewImageFormatInfoEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewImageFormatInfoEXT( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceImageViewImageFormatInfoEXT( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceImageViewImageFormatInfoEXT & operator=( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT const *>( &rhs );
+ return *this;
+ }
- PhysicalDeviceImageViewImageFormatInfoEXT& operator=( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceImageViewImageFormatInfoEXT & operator=( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceImageViewImageFormatInfoEXT ) );
return *this;
}
@@ -44985,6 +64859,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceImageViewImageFormatInfoEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceImageViewImageFormatInfoEXT*>( this );
@@ -44995,6 +64870,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceImageViewImageFormatInfoEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceImageViewImageFormatInfoEXT const& ) const = default;
+#else
bool operator==( PhysicalDeviceImageViewImageFormatInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -45006,35 +64885,52 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT;
void* pNext = {};
VULKAN_HPP_NAMESPACE::ImageViewType imageViewType = VULKAN_HPP_NAMESPACE::ImageViewType::e1D;
+
};
static_assert( sizeof( PhysicalDeviceImageViewImageFormatInfoEXT ) == sizeof( VkPhysicalDeviceImageViewImageFormatInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceImageViewImageFormatInfoEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT>
+ {
+ using Type = PhysicalDeviceImageViewImageFormatInfoEXT;
+ };
+
struct PhysicalDeviceImagelessFramebufferFeatures
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeatures( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {} ) VULKAN_HPP_NOEXCEPT
- : imagelessFramebuffer( imagelessFramebuffer_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImagelessFramebufferFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeatures(VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {}) VULKAN_HPP_NOEXCEPT
+ : imagelessFramebuffer( imagelessFramebuffer_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures ) - offsetof( PhysicalDeviceImagelessFramebufferFeatures, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeatures( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceImagelessFramebufferFeatures( VkPhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceImagelessFramebufferFeatures & operator=( VkPhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures const *>( &rhs );
+ return *this;
+ }
- PhysicalDeviceImagelessFramebufferFeatures& operator=( VkPhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceImagelessFramebufferFeatures & operator=( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceImagelessFramebufferFeatures ) );
return *this;
}
@@ -45050,6 +64946,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceImagelessFramebufferFeatures const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceImagelessFramebufferFeatures*>( this );
@@ -45060,6 +64957,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceImagelessFramebufferFeatures*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceImagelessFramebufferFeatures const& ) const = default;
+#else
bool operator==( PhysicalDeviceImagelessFramebufferFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -45071,35 +64972,53 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImagelessFramebufferFeatures;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer = {};
+
};
static_assert( sizeof( PhysicalDeviceImagelessFramebufferFeatures ) == sizeof( VkPhysicalDeviceImagelessFramebufferFeatures ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceImagelessFramebufferFeatures>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceImagelessFramebufferFeatures>
+ {
+ using Type = PhysicalDeviceImagelessFramebufferFeatures;
+ };
+ using PhysicalDeviceImagelessFramebufferFeaturesKHR = PhysicalDeviceImagelessFramebufferFeatures;
+
struct PhysicalDeviceIndexTypeUint8FeaturesEXT
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ = {} ) VULKAN_HPP_NOEXCEPT
- : indexTypeUint8( indexTypeUint8_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8FeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ = {}) VULKAN_HPP_NOEXCEPT
+ : indexTypeUint8( indexTypeUint8_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT ) - offsetof( PhysicalDeviceIndexTypeUint8FeaturesEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8FeaturesEXT( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceIndexTypeUint8FeaturesEXT( VkPhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceIndexTypeUint8FeaturesEXT& operator=( VkPhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceIndexTypeUint8FeaturesEXT & operator=( VkPhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT const *>( &rhs );
+ return *this;
+ }
+
+ PhysicalDeviceIndexTypeUint8FeaturesEXT & operator=( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceIndexTypeUint8FeaturesEXT ) );
return *this;
}
@@ -45115,6 +65034,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceIndexTypeUint8FeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceIndexTypeUint8FeaturesEXT*>( this );
@@ -45125,6 +65045,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceIndexTypeUint8FeaturesEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceIndexTypeUint8FeaturesEXT const& ) const = default;
+#else
bool operator==( PhysicalDeviceIndexTypeUint8FeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -45136,37 +65060,52 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8 = {};
+
};
static_assert( sizeof( PhysicalDeviceIndexTypeUint8FeaturesEXT ) == sizeof( VkPhysicalDeviceIndexTypeUint8FeaturesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceIndexTypeUint8FeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT>
+ {
+ using Type = PhysicalDeviceIndexTypeUint8FeaturesEXT;
+ };
+
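
Illustrative sketch (not part of this patch): when VULKAN_HPP_HAS_SPACESHIP_OPERATOR is defined (C++20 with <compare>), the defaulted operator<=> also provides the equality operators, so existing == / != comparisons keep working while three-way comparison becomes available; otherwise the handwritten operators below are used.

    vk::PhysicalDeviceIndexTypeUint8FeaturesEXT a, b;
    b.indexTypeUint8 = VK_TRUE;
    bool differ = ( a != b );        // valid with either code path
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto order = ( a <=> b );        // three-way comparison, only with the spaceship operator
#endif
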
struct PhysicalDeviceInlineUniformBlockFeaturesEXT
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ = {} ) VULKAN_HPP_NOEXCEPT
- : inlineUniformBlock( inlineUniformBlock_ )
- , descriptorBindingInlineUniformBlockUpdateAfterBind( descriptorBindingInlineUniformBlockUpdateAfterBind_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInlineUniformBlockFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ = {}) VULKAN_HPP_NOEXCEPT
+ : inlineUniformBlock( inlineUniformBlock_ ), descriptorBindingInlineUniformBlockUpdateAfterBind( descriptorBindingInlineUniformBlockUpdateAfterBind_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeaturesEXT ) - offsetof( PhysicalDeviceInlineUniformBlockFeaturesEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeaturesEXT( PhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceInlineUniformBlockFeaturesEXT( VkPhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceInlineUniformBlockFeaturesEXT & operator=( VkPhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeaturesEXT const *>( &rhs );
+ return *this;
+ }
- PhysicalDeviceInlineUniformBlockFeaturesEXT& operator=( VkPhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceInlineUniformBlockFeaturesEXT & operator=( PhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeaturesEXT const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceInlineUniformBlockFeaturesEXT ) );
return *this;
}
@@ -45188,6 +65127,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceInlineUniformBlockFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceInlineUniformBlockFeaturesEXT*>( this );
@@ -45198,6 +65138,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceInlineUniformBlockFeaturesEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceInlineUniformBlockFeaturesEXT const& ) const = default;
+#else
bool operator==( PhysicalDeviceInlineUniformBlockFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -45210,47 +65154,57 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockFeaturesEXT;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock = {};
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind = {};
+
};
static_assert( sizeof( PhysicalDeviceInlineUniformBlockFeaturesEXT ) == sizeof( VkPhysicalDeviceInlineUniformBlockFeaturesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceInlineUniformBlockFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceInlineUniformBlockFeaturesEXT>
+ {
+ using Type = PhysicalDeviceInlineUniformBlockFeaturesEXT;
+ };
+
struct PhysicalDeviceInlineUniformBlockPropertiesEXT
{
- PhysicalDeviceInlineUniformBlockPropertiesEXT( uint32_t maxInlineUniformBlockSize_ = {},
- uint32_t maxPerStageDescriptorInlineUniformBlocks_ = {},
- uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ = {},
- uint32_t maxDescriptorSetInlineUniformBlocks_ = {},
- uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ = {} ) VULKAN_HPP_NOEXCEPT
- : maxInlineUniformBlockSize( maxInlineUniformBlockSize_ )
- , maxPerStageDescriptorInlineUniformBlocks( maxPerStageDescriptorInlineUniformBlocks_ )
- , maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ )
- , maxDescriptorSetInlineUniformBlocks( maxDescriptorSetInlineUniformBlocks_ )
- , maxDescriptorSetUpdateAfterBindInlineUniformBlocks( maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInlineUniformBlockPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockPropertiesEXT(uint32_t maxInlineUniformBlockSize_ = {}, uint32_t maxPerStageDescriptorInlineUniformBlocks_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ = {}, uint32_t maxDescriptorSetInlineUniformBlocks_ = {}, uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ = {}) VULKAN_HPP_NOEXCEPT
+ : maxInlineUniformBlockSize( maxInlineUniformBlockSize_ ), maxPerStageDescriptorInlineUniformBlocks( maxPerStageDescriptorInlineUniformBlocks_ ), maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ ), maxDescriptorSetInlineUniformBlocks( maxDescriptorSetInlineUniformBlocks_ ), maxDescriptorSetUpdateAfterBindInlineUniformBlocks( maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockPropertiesEXT ) - offsetof( PhysicalDeviceInlineUniformBlockPropertiesEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockPropertiesEXT( PhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceInlineUniformBlockPropertiesEXT( VkPhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceInlineUniformBlockPropertiesEXT& operator=( VkPhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceInlineUniformBlockPropertiesEXT & operator=( VkPhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockPropertiesEXT const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockPropertiesEXT const *>( &rhs );
return *this;
}
+ PhysicalDeviceInlineUniformBlockPropertiesEXT & operator=( PhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceInlineUniformBlockPropertiesEXT ) );
+ return *this;
+ }
+
+
operator VkPhysicalDeviceInlineUniformBlockPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceInlineUniformBlockPropertiesEXT*>( this );
@@ -45261,6 +65215,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceInlineUniformBlockPropertiesEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceInlineUniformBlockPropertiesEXT const& ) const = default;
+#else
bool operator==( PhysicalDeviceInlineUniformBlockPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -45276,6 +65234,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockPropertiesEXT;
@@ -45285,510 +65246,44 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = {};
uint32_t maxDescriptorSetInlineUniformBlocks = {};
uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks = {};
+
};
static_assert( sizeof( PhysicalDeviceInlineUniformBlockPropertiesEXT ) == sizeof( VkPhysicalDeviceInlineUniformBlockPropertiesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceInlineUniformBlockPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
- struct PhysicalDeviceLimits
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceInlineUniformBlockPropertiesEXT>
{
- PhysicalDeviceLimits( uint32_t maxImageDimension1D_ = {},
- uint32_t maxImageDimension2D_ = {},
- uint32_t maxImageDimension3D_ = {},
- uint32_t maxImageDimensionCube_ = {},
- uint32_t maxImageArrayLayers_ = {},
- uint32_t maxTexelBufferElements_ = {},
- uint32_t maxUniformBufferRange_ = {},
- uint32_t maxStorageBufferRange_ = {},
- uint32_t maxPushConstantsSize_ = {},
- uint32_t maxMemoryAllocationCount_ = {},
- uint32_t maxSamplerAllocationCount_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize_ = {},
- uint32_t maxBoundDescriptorSets_ = {},
- uint32_t maxPerStageDescriptorSamplers_ = {},
- uint32_t maxPerStageDescriptorUniformBuffers_ = {},
- uint32_t maxPerStageDescriptorStorageBuffers_ = {},
- uint32_t maxPerStageDescriptorSampledImages_ = {},
- uint32_t maxPerStageDescriptorStorageImages_ = {},
- uint32_t maxPerStageDescriptorInputAttachments_ = {},
- uint32_t maxPerStageResources_ = {},
- uint32_t maxDescriptorSetSamplers_ = {},
- uint32_t maxDescriptorSetUniformBuffers_ = {},
- uint32_t maxDescriptorSetUniformBuffersDynamic_ = {},
- uint32_t maxDescriptorSetStorageBuffers_ = {},
- uint32_t maxDescriptorSetStorageBuffersDynamic_ = {},
- uint32_t maxDescriptorSetSampledImages_ = {},
- uint32_t maxDescriptorSetStorageImages_ = {},
- uint32_t maxDescriptorSetInputAttachments_ = {},
- uint32_t maxVertexInputAttributes_ = {},
- uint32_t maxVertexInputBindings_ = {},
- uint32_t maxVertexInputAttributeOffset_ = {},
- uint32_t maxVertexInputBindingStride_ = {},
- uint32_t maxVertexOutputComponents_ = {},
- uint32_t maxTessellationGenerationLevel_ = {},
- uint32_t maxTessellationPatchSize_ = {},
- uint32_t maxTessellationControlPerVertexInputComponents_ = {},
- uint32_t maxTessellationControlPerVertexOutputComponents_ = {},
- uint32_t maxTessellationControlPerPatchOutputComponents_ = {},
- uint32_t maxTessellationControlTotalOutputComponents_ = {},
- uint32_t maxTessellationEvaluationInputComponents_ = {},
- uint32_t maxTessellationEvaluationOutputComponents_ = {},
- uint32_t maxGeometryShaderInvocations_ = {},
- uint32_t maxGeometryInputComponents_ = {},
- uint32_t maxGeometryOutputComponents_ = {},
- uint32_t maxGeometryOutputVertices_ = {},
- uint32_t maxGeometryTotalOutputComponents_ = {},
- uint32_t maxFragmentInputComponents_ = {},
- uint32_t maxFragmentOutputAttachments_ = {},
- uint32_t maxFragmentDualSrcAttachments_ = {},
- uint32_t maxFragmentCombinedOutputResources_ = {},
- uint32_t maxComputeSharedMemorySize_ = {},
- std::array<uint32_t,3> const& maxComputeWorkGroupCount_ = {},
- uint32_t maxComputeWorkGroupInvocations_ = {},
- std::array<uint32_t,3> const& maxComputeWorkGroupSize_ = {},
- uint32_t subPixelPrecisionBits_ = {},
- uint32_t subTexelPrecisionBits_ = {},
- uint32_t mipmapPrecisionBits_ = {},
- uint32_t maxDrawIndexedIndexValue_ = {},
- uint32_t maxDrawIndirectCount_ = {},
- float maxSamplerLodBias_ = {},
- float maxSamplerAnisotropy_ = {},
- uint32_t maxViewports_ = {},
- std::array<uint32_t,2> const& maxViewportDimensions_ = {},
- std::array<float,2> const& viewportBoundsRange_ = {},
- uint32_t viewportSubPixelBits_ = {},
- size_t minMemoryMapAlignment_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize minTexelBufferOffsetAlignment_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize minUniformBufferOffsetAlignment_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize minStorageBufferOffsetAlignment_ = {},
- int32_t minTexelOffset_ = {},
- uint32_t maxTexelOffset_ = {},
- int32_t minTexelGatherOffset_ = {},
- uint32_t maxTexelGatherOffset_ = {},
- float minInterpolationOffset_ = {},
- float maxInterpolationOffset_ = {},
- uint32_t subPixelInterpolationOffsetBits_ = {},
- uint32_t maxFramebufferWidth_ = {},
- uint32_t maxFramebufferHeight_ = {},
- uint32_t maxFramebufferLayers_ = {},
- VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferColorSampleCounts_ = {},
- VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferDepthSampleCounts_ = {},
- VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferStencilSampleCounts_ = {},
- VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferNoAttachmentsSampleCounts_ = {},
- uint32_t maxColorAttachments_ = {},
- VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageColorSampleCounts_ = {},
- VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageIntegerSampleCounts_ = {},
- VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageDepthSampleCounts_ = {},
- VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageStencilSampleCounts_ = {},
- VULKAN_HPP_NAMESPACE::SampleCountFlags storageImageSampleCounts_ = {},
- uint32_t maxSampleMaskWords_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 timestampComputeAndGraphics_ = {},
- float timestampPeriod_ = {},
- uint32_t maxClipDistances_ = {},
- uint32_t maxCullDistances_ = {},
- uint32_t maxCombinedClipAndCullDistances_ = {},
- uint32_t discreteQueuePriorities_ = {},
- std::array<float,2> const& pointSizeRange_ = {},
- std::array<float,2> const& lineWidthRange_ = {},
- float pointSizeGranularity_ = {},
- float lineWidthGranularity_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 strictLines_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 standardSampleLocations_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyOffsetAlignment_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyRowPitchAlignment_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize_ = {} ) VULKAN_HPP_NOEXCEPT
- : maxImageDimension1D( maxImageDimension1D_ )
- , maxImageDimension2D( maxImageDimension2D_ )
- , maxImageDimension3D( maxImageDimension3D_ )
- , maxImageDimensionCube( maxImageDimensionCube_ )
- , maxImageArrayLayers( maxImageArrayLayers_ )
- , maxTexelBufferElements( maxTexelBufferElements_ )
- , maxUniformBufferRange( maxUniformBufferRange_ )
- , maxStorageBufferRange( maxStorageBufferRange_ )
- , maxPushConstantsSize( maxPushConstantsSize_ )
- , maxMemoryAllocationCount( maxMemoryAllocationCount_ )
- , maxSamplerAllocationCount( maxSamplerAllocationCount_ )
- , bufferImageGranularity( bufferImageGranularity_ )
- , sparseAddressSpaceSize( sparseAddressSpaceSize_ )
- , maxBoundDescriptorSets( maxBoundDescriptorSets_ )
- , maxPerStageDescriptorSamplers( maxPerStageDescriptorSamplers_ )
- , maxPerStageDescriptorUniformBuffers( maxPerStageDescriptorUniformBuffers_ )
- , maxPerStageDescriptorStorageBuffers( maxPerStageDescriptorStorageBuffers_ )
- , maxPerStageDescriptorSampledImages( maxPerStageDescriptorSampledImages_ )
- , maxPerStageDescriptorStorageImages( maxPerStageDescriptorStorageImages_ )
- , maxPerStageDescriptorInputAttachments( maxPerStageDescriptorInputAttachments_ )
- , maxPerStageResources( maxPerStageResources_ )
- , maxDescriptorSetSamplers( maxDescriptorSetSamplers_ )
- , maxDescriptorSetUniformBuffers( maxDescriptorSetUniformBuffers_ )
- , maxDescriptorSetUniformBuffersDynamic( maxDescriptorSetUniformBuffersDynamic_ )
- , maxDescriptorSetStorageBuffers( maxDescriptorSetStorageBuffers_ )
- , maxDescriptorSetStorageBuffersDynamic( maxDescriptorSetStorageBuffersDynamic_ )
- , maxDescriptorSetSampledImages( maxDescriptorSetSampledImages_ )
- , maxDescriptorSetStorageImages( maxDescriptorSetStorageImages_ )
- , maxDescriptorSetInputAttachments( maxDescriptorSetInputAttachments_ )
- , maxVertexInputAttributes( maxVertexInputAttributes_ )
- , maxVertexInputBindings( maxVertexInputBindings_ )
- , maxVertexInputAttributeOffset( maxVertexInputAttributeOffset_ )
- , maxVertexInputBindingStride( maxVertexInputBindingStride_ )
- , maxVertexOutputComponents( maxVertexOutputComponents_ )
- , maxTessellationGenerationLevel( maxTessellationGenerationLevel_ )
- , maxTessellationPatchSize( maxTessellationPatchSize_ )
- , maxTessellationControlPerVertexInputComponents( maxTessellationControlPerVertexInputComponents_ )
- , maxTessellationControlPerVertexOutputComponents( maxTessellationControlPerVertexOutputComponents_ )
- , maxTessellationControlPerPatchOutputComponents( maxTessellationControlPerPatchOutputComponents_ )
- , maxTessellationControlTotalOutputComponents( maxTessellationControlTotalOutputComponents_ )
- , maxTessellationEvaluationInputComponents( maxTessellationEvaluationInputComponents_ )
- , maxTessellationEvaluationOutputComponents( maxTessellationEvaluationOutputComponents_ )
- , maxGeometryShaderInvocations( maxGeometryShaderInvocations_ )
- , maxGeometryInputComponents( maxGeometryInputComponents_ )
- , maxGeometryOutputComponents( maxGeometryOutputComponents_ )
- , maxGeometryOutputVertices( maxGeometryOutputVertices_ )
- , maxGeometryTotalOutputComponents( maxGeometryTotalOutputComponents_ )
- , maxFragmentInputComponents( maxFragmentInputComponents_ )
- , maxFragmentOutputAttachments( maxFragmentOutputAttachments_ )
- , maxFragmentDualSrcAttachments( maxFragmentDualSrcAttachments_ )
- , maxFragmentCombinedOutputResources( maxFragmentCombinedOutputResources_ )
- , maxComputeSharedMemorySize( maxComputeSharedMemorySize_ )
- , maxComputeWorkGroupCount{}
- , maxComputeWorkGroupInvocations( maxComputeWorkGroupInvocations_ )
- , maxComputeWorkGroupSize{}
- , subPixelPrecisionBits( subPixelPrecisionBits_ )
- , subTexelPrecisionBits( subTexelPrecisionBits_ )
- , mipmapPrecisionBits( mipmapPrecisionBits_ )
- , maxDrawIndexedIndexValue( maxDrawIndexedIndexValue_ )
- , maxDrawIndirectCount( maxDrawIndirectCount_ )
- , maxSamplerLodBias( maxSamplerLodBias_ )
- , maxSamplerAnisotropy( maxSamplerAnisotropy_ )
- , maxViewports( maxViewports_ )
- , maxViewportDimensions{}
- , viewportBoundsRange{}
- , viewportSubPixelBits( viewportSubPixelBits_ )
- , minMemoryMapAlignment( minMemoryMapAlignment_ )
- , minTexelBufferOffsetAlignment( minTexelBufferOffsetAlignment_ )
- , minUniformBufferOffsetAlignment( minUniformBufferOffsetAlignment_ )
- , minStorageBufferOffsetAlignment( minStorageBufferOffsetAlignment_ )
- , minTexelOffset( minTexelOffset_ )
- , maxTexelOffset( maxTexelOffset_ )
- , minTexelGatherOffset( minTexelGatherOffset_ )
- , maxTexelGatherOffset( maxTexelGatherOffset_ )
- , minInterpolationOffset( minInterpolationOffset_ )
- , maxInterpolationOffset( maxInterpolationOffset_ )
- , subPixelInterpolationOffsetBits( subPixelInterpolationOffsetBits_ )
- , maxFramebufferWidth( maxFramebufferWidth_ )
- , maxFramebufferHeight( maxFramebufferHeight_ )
- , maxFramebufferLayers( maxFramebufferLayers_ )
- , framebufferColorSampleCounts( framebufferColorSampleCounts_ )
- , framebufferDepthSampleCounts( framebufferDepthSampleCounts_ )
- , framebufferStencilSampleCounts( framebufferStencilSampleCounts_ )
- , framebufferNoAttachmentsSampleCounts( framebufferNoAttachmentsSampleCounts_ )
- , maxColorAttachments( maxColorAttachments_ )
- , sampledImageColorSampleCounts( sampledImageColorSampleCounts_ )
- , sampledImageIntegerSampleCounts( sampledImageIntegerSampleCounts_ )
- , sampledImageDepthSampleCounts( sampledImageDepthSampleCounts_ )
- , sampledImageStencilSampleCounts( sampledImageStencilSampleCounts_ )
- , storageImageSampleCounts( storageImageSampleCounts_ )
- , maxSampleMaskWords( maxSampleMaskWords_ )
- , timestampComputeAndGraphics( timestampComputeAndGraphics_ )
- , timestampPeriod( timestampPeriod_ )
- , maxClipDistances( maxClipDistances_ )
- , maxCullDistances( maxCullDistances_ )
- , maxCombinedClipAndCullDistances( maxCombinedClipAndCullDistances_ )
- , discreteQueuePriorities( discreteQueuePriorities_ )
- , pointSizeRange{}
- , lineWidthRange{}
- , pointSizeGranularity( pointSizeGranularity_ )
- , lineWidthGranularity( lineWidthGranularity_ )
- , strictLines( strictLines_ )
- , standardSampleLocations( standardSampleLocations_ )
- , optimalBufferCopyOffsetAlignment( optimalBufferCopyOffsetAlignment_ )
- , optimalBufferCopyRowPitchAlignment( optimalBufferCopyRowPitchAlignment_ )
- , nonCoherentAtomSize( nonCoherentAtomSize_ )
- {
- VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint32_t,3,3>::copy( maxComputeWorkGroupCount, maxComputeWorkGroupCount_ );
- VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint32_t,3,3>::copy( maxComputeWorkGroupSize, maxComputeWorkGroupSize_ );
- VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint32_t,2,2>::copy( maxViewportDimensions, maxViewportDimensions_ );
- VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<float,2,2>::copy( viewportBoundsRange, viewportBoundsRange_ );
- VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<float,2,2>::copy( pointSizeRange, pointSizeRange_ );
- VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<float,2,2>::copy( lineWidthRange, lineWidthRange_ );
- }
-
- PhysicalDeviceLimits( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
-
- PhysicalDeviceLimits& operator=( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits const *>(&rhs);
- return *this;
- }
-
- operator VkPhysicalDeviceLimits const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkPhysicalDeviceLimits*>( this );
- }
-
- operator VkPhysicalDeviceLimits &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkPhysicalDeviceLimits*>( this );
- }
-
- bool operator==( PhysicalDeviceLimits const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( maxImageDimension1D == rhs.maxImageDimension1D )
- && ( maxImageDimension2D == rhs.maxImageDimension2D )
- && ( maxImageDimension3D == rhs.maxImageDimension3D )
- && ( maxImageDimensionCube == rhs.maxImageDimensionCube )
- && ( maxImageArrayLayers == rhs.maxImageArrayLayers )
- && ( maxTexelBufferElements == rhs.maxTexelBufferElements )
- && ( maxUniformBufferRange == rhs.maxUniformBufferRange )
- && ( maxStorageBufferRange == rhs.maxStorageBufferRange )
- && ( maxPushConstantsSize == rhs.maxPushConstantsSize )
- && ( maxMemoryAllocationCount == rhs.maxMemoryAllocationCount )
- && ( maxSamplerAllocationCount == rhs.maxSamplerAllocationCount )
- && ( bufferImageGranularity == rhs.bufferImageGranularity )
- && ( sparseAddressSpaceSize == rhs.sparseAddressSpaceSize )
- && ( maxBoundDescriptorSets == rhs.maxBoundDescriptorSets )
- && ( maxPerStageDescriptorSamplers == rhs.maxPerStageDescriptorSamplers )
- && ( maxPerStageDescriptorUniformBuffers == rhs.maxPerStageDescriptorUniformBuffers )
- && ( maxPerStageDescriptorStorageBuffers == rhs.maxPerStageDescriptorStorageBuffers )
- && ( maxPerStageDescriptorSampledImages == rhs.maxPerStageDescriptorSampledImages )
- && ( maxPerStageDescriptorStorageImages == rhs.maxPerStageDescriptorStorageImages )
- && ( maxPerStageDescriptorInputAttachments == rhs.maxPerStageDescriptorInputAttachments )
- && ( maxPerStageResources == rhs.maxPerStageResources )
- && ( maxDescriptorSetSamplers == rhs.maxDescriptorSetSamplers )
- && ( maxDescriptorSetUniformBuffers == rhs.maxDescriptorSetUniformBuffers )
- && ( maxDescriptorSetUniformBuffersDynamic == rhs.maxDescriptorSetUniformBuffersDynamic )
- && ( maxDescriptorSetStorageBuffers == rhs.maxDescriptorSetStorageBuffers )
- && ( maxDescriptorSetStorageBuffersDynamic == rhs.maxDescriptorSetStorageBuffersDynamic )
- && ( maxDescriptorSetSampledImages == rhs.maxDescriptorSetSampledImages )
- && ( maxDescriptorSetStorageImages == rhs.maxDescriptorSetStorageImages )
- && ( maxDescriptorSetInputAttachments == rhs.maxDescriptorSetInputAttachments )
- && ( maxVertexInputAttributes == rhs.maxVertexInputAttributes )
- && ( maxVertexInputBindings == rhs.maxVertexInputBindings )
- && ( maxVertexInputAttributeOffset == rhs.maxVertexInputAttributeOffset )
- && ( maxVertexInputBindingStride == rhs.maxVertexInputBindingStride )
- && ( maxVertexOutputComponents == rhs.maxVertexOutputComponents )
- && ( maxTessellationGenerationLevel == rhs.maxTessellationGenerationLevel )
- && ( maxTessellationPatchSize == rhs.maxTessellationPatchSize )
- && ( maxTessellationControlPerVertexInputComponents == rhs.maxTessellationControlPerVertexInputComponents )
- && ( maxTessellationControlPerVertexOutputComponents == rhs.maxTessellationControlPerVertexOutputComponents )
- && ( maxTessellationControlPerPatchOutputComponents == rhs.maxTessellationControlPerPatchOutputComponents )
- && ( maxTessellationControlTotalOutputComponents == rhs.maxTessellationControlTotalOutputComponents )
- && ( maxTessellationEvaluationInputComponents == rhs.maxTessellationEvaluationInputComponents )
- && ( maxTessellationEvaluationOutputComponents == rhs.maxTessellationEvaluationOutputComponents )
- && ( maxGeometryShaderInvocations == rhs.maxGeometryShaderInvocations )
- && ( maxGeometryInputComponents == rhs.maxGeometryInputComponents )
- && ( maxGeometryOutputComponents == rhs.maxGeometryOutputComponents )
- && ( maxGeometryOutputVertices == rhs.maxGeometryOutputVertices )
- && ( maxGeometryTotalOutputComponents == rhs.maxGeometryTotalOutputComponents )
- && ( maxFragmentInputComponents == rhs.maxFragmentInputComponents )
- && ( maxFragmentOutputAttachments == rhs.maxFragmentOutputAttachments )
- && ( maxFragmentDualSrcAttachments == rhs.maxFragmentDualSrcAttachments )
- && ( maxFragmentCombinedOutputResources == rhs.maxFragmentCombinedOutputResources )
- && ( maxComputeSharedMemorySize == rhs.maxComputeSharedMemorySize )
- && ( memcmp( maxComputeWorkGroupCount, rhs.maxComputeWorkGroupCount, 3 * sizeof( uint32_t ) ) == 0 )
- && ( maxComputeWorkGroupInvocations == rhs.maxComputeWorkGroupInvocations )
- && ( memcmp( maxComputeWorkGroupSize, rhs.maxComputeWorkGroupSize, 3 * sizeof( uint32_t ) ) == 0 )
- && ( subPixelPrecisionBits == rhs.subPixelPrecisionBits )
- && ( subTexelPrecisionBits == rhs.subTexelPrecisionBits )
- && ( mipmapPrecisionBits == rhs.mipmapPrecisionBits )
- && ( maxDrawIndexedIndexValue == rhs.maxDrawIndexedIndexValue )
- && ( maxDrawIndirectCount == rhs.maxDrawIndirectCount )
- && ( maxSamplerLodBias == rhs.maxSamplerLodBias )
- && ( maxSamplerAnisotropy == rhs.maxSamplerAnisotropy )
- && ( maxViewports == rhs.maxViewports )
- && ( memcmp( maxViewportDimensions, rhs.maxViewportDimensions, 2 * sizeof( uint32_t ) ) == 0 )
- && ( memcmp( viewportBoundsRange, rhs.viewportBoundsRange, 2 * sizeof( float ) ) == 0 )
- && ( viewportSubPixelBits == rhs.viewportSubPixelBits )
- && ( minMemoryMapAlignment == rhs.minMemoryMapAlignment )
- && ( minTexelBufferOffsetAlignment == rhs.minTexelBufferOffsetAlignment )
- && ( minUniformBufferOffsetAlignment == rhs.minUniformBufferOffsetAlignment )
- && ( minStorageBufferOffsetAlignment == rhs.minStorageBufferOffsetAlignment )
- && ( minTexelOffset == rhs.minTexelOffset )
- && ( maxTexelOffset == rhs.maxTexelOffset )
- && ( minTexelGatherOffset == rhs.minTexelGatherOffset )
- && ( maxTexelGatherOffset == rhs.maxTexelGatherOffset )
- && ( minInterpolationOffset == rhs.minInterpolationOffset )
- && ( maxInterpolationOffset == rhs.maxInterpolationOffset )
- && ( subPixelInterpolationOffsetBits == rhs.subPixelInterpolationOffsetBits )
- && ( maxFramebufferWidth == rhs.maxFramebufferWidth )
- && ( maxFramebufferHeight == rhs.maxFramebufferHeight )
- && ( maxFramebufferLayers == rhs.maxFramebufferLayers )
- && ( framebufferColorSampleCounts == rhs.framebufferColorSampleCounts )
- && ( framebufferDepthSampleCounts == rhs.framebufferDepthSampleCounts )
- && ( framebufferStencilSampleCounts == rhs.framebufferStencilSampleCounts )
- && ( framebufferNoAttachmentsSampleCounts == rhs.framebufferNoAttachmentsSampleCounts )
- && ( maxColorAttachments == rhs.maxColorAttachments )
- && ( sampledImageColorSampleCounts == rhs.sampledImageColorSampleCounts )
- && ( sampledImageIntegerSampleCounts == rhs.sampledImageIntegerSampleCounts )
- && ( sampledImageDepthSampleCounts == rhs.sampledImageDepthSampleCounts )
- && ( sampledImageStencilSampleCounts == rhs.sampledImageStencilSampleCounts )
- && ( storageImageSampleCounts == rhs.storageImageSampleCounts )
- && ( maxSampleMaskWords == rhs.maxSampleMaskWords )
- && ( timestampComputeAndGraphics == rhs.timestampComputeAndGraphics )
- && ( timestampPeriod == rhs.timestampPeriod )
- && ( maxClipDistances == rhs.maxClipDistances )
- && ( maxCullDistances == rhs.maxCullDistances )
- && ( maxCombinedClipAndCullDistances == rhs.maxCombinedClipAndCullDistances )
- && ( discreteQueuePriorities == rhs.discreteQueuePriorities )
- && ( memcmp( pointSizeRange, rhs.pointSizeRange, 2 * sizeof( float ) ) == 0 )
- && ( memcmp( lineWidthRange, rhs.lineWidthRange, 2 * sizeof( float ) ) == 0 )
- && ( pointSizeGranularity == rhs.pointSizeGranularity )
- && ( lineWidthGranularity == rhs.lineWidthGranularity )
- && ( strictLines == rhs.strictLines )
- && ( standardSampleLocations == rhs.standardSampleLocations )
- && ( optimalBufferCopyOffsetAlignment == rhs.optimalBufferCopyOffsetAlignment )
- && ( optimalBufferCopyRowPitchAlignment == rhs.optimalBufferCopyRowPitchAlignment )
- && ( nonCoherentAtomSize == rhs.nonCoherentAtomSize );
- }
-
- bool operator!=( PhysicalDeviceLimits const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-
- public:
- uint32_t maxImageDimension1D = {};
- uint32_t maxImageDimension2D = {};
- uint32_t maxImageDimension3D = {};
- uint32_t maxImageDimensionCube = {};
- uint32_t maxImageArrayLayers = {};
- uint32_t maxTexelBufferElements = {};
- uint32_t maxUniformBufferRange = {};
- uint32_t maxStorageBufferRange = {};
- uint32_t maxPushConstantsSize = {};
- uint32_t maxMemoryAllocationCount = {};
- uint32_t maxSamplerAllocationCount = {};
- VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity = {};
- VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize = {};
- uint32_t maxBoundDescriptorSets = {};
- uint32_t maxPerStageDescriptorSamplers = {};
- uint32_t maxPerStageDescriptorUniformBuffers = {};
- uint32_t maxPerStageDescriptorStorageBuffers = {};
- uint32_t maxPerStageDescriptorSampledImages = {};
- uint32_t maxPerStageDescriptorStorageImages = {};
- uint32_t maxPerStageDescriptorInputAttachments = {};
- uint32_t maxPerStageResources = {};
- uint32_t maxDescriptorSetSamplers = {};
- uint32_t maxDescriptorSetUniformBuffers = {};
- uint32_t maxDescriptorSetUniformBuffersDynamic = {};
- uint32_t maxDescriptorSetStorageBuffers = {};
- uint32_t maxDescriptorSetStorageBuffersDynamic = {};
- uint32_t maxDescriptorSetSampledImages = {};
- uint32_t maxDescriptorSetStorageImages = {};
- uint32_t maxDescriptorSetInputAttachments = {};
- uint32_t maxVertexInputAttributes = {};
- uint32_t maxVertexInputBindings = {};
- uint32_t maxVertexInputAttributeOffset = {};
- uint32_t maxVertexInputBindingStride = {};
- uint32_t maxVertexOutputComponents = {};
- uint32_t maxTessellationGenerationLevel = {};
- uint32_t maxTessellationPatchSize = {};
- uint32_t maxTessellationControlPerVertexInputComponents = {};
- uint32_t maxTessellationControlPerVertexOutputComponents = {};
- uint32_t maxTessellationControlPerPatchOutputComponents = {};
- uint32_t maxTessellationControlTotalOutputComponents = {};
- uint32_t maxTessellationEvaluationInputComponents = {};
- uint32_t maxTessellationEvaluationOutputComponents = {};
- uint32_t maxGeometryShaderInvocations = {};
- uint32_t maxGeometryInputComponents = {};
- uint32_t maxGeometryOutputComponents = {};
- uint32_t maxGeometryOutputVertices = {};
- uint32_t maxGeometryTotalOutputComponents = {};
- uint32_t maxFragmentInputComponents = {};
- uint32_t maxFragmentOutputAttachments = {};
- uint32_t maxFragmentDualSrcAttachments = {};
- uint32_t maxFragmentCombinedOutputResources = {};
- uint32_t maxComputeSharedMemorySize = {};
- uint32_t maxComputeWorkGroupCount[3] = {};
- uint32_t maxComputeWorkGroupInvocations = {};
- uint32_t maxComputeWorkGroupSize[3] = {};
- uint32_t subPixelPrecisionBits = {};
- uint32_t subTexelPrecisionBits = {};
- uint32_t mipmapPrecisionBits = {};
- uint32_t maxDrawIndexedIndexValue = {};
- uint32_t maxDrawIndirectCount = {};
- float maxSamplerLodBias = {};
- float maxSamplerAnisotropy = {};
- uint32_t maxViewports = {};
- uint32_t maxViewportDimensions[2] = {};
- float viewportBoundsRange[2] = {};
- uint32_t viewportSubPixelBits = {};
- size_t minMemoryMapAlignment = {};
- VULKAN_HPP_NAMESPACE::DeviceSize minTexelBufferOffsetAlignment = {};
- VULKAN_HPP_NAMESPACE::DeviceSize minUniformBufferOffsetAlignment = {};
- VULKAN_HPP_NAMESPACE::DeviceSize minStorageBufferOffsetAlignment = {};
- int32_t minTexelOffset = {};
- uint32_t maxTexelOffset = {};
- int32_t minTexelGatherOffset = {};
- uint32_t maxTexelGatherOffset = {};
- float minInterpolationOffset = {};
- float maxInterpolationOffset = {};
- uint32_t subPixelInterpolationOffsetBits = {};
- uint32_t maxFramebufferWidth = {};
- uint32_t maxFramebufferHeight = {};
- uint32_t maxFramebufferLayers = {};
- VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferColorSampleCounts = {};
- VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferDepthSampleCounts = {};
- VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferStencilSampleCounts = {};
- VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferNoAttachmentsSampleCounts = {};
- uint32_t maxColorAttachments = {};
- VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageColorSampleCounts = {};
- VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageIntegerSampleCounts = {};
- VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageDepthSampleCounts = {};
- VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageStencilSampleCounts = {};
- VULKAN_HPP_NAMESPACE::SampleCountFlags storageImageSampleCounts = {};
- uint32_t maxSampleMaskWords = {};
- VULKAN_HPP_NAMESPACE::Bool32 timestampComputeAndGraphics = {};
- float timestampPeriod = {};
- uint32_t maxClipDistances = {};
- uint32_t maxCullDistances = {};
- uint32_t maxCombinedClipAndCullDistances = {};
- uint32_t discreteQueuePriorities = {};
- float pointSizeRange[2] = {};
- float lineWidthRange[2] = {};
- float pointSizeGranularity = {};
- float lineWidthGranularity = {};
- VULKAN_HPP_NAMESPACE::Bool32 strictLines = {};
- VULKAN_HPP_NAMESPACE::Bool32 standardSampleLocations = {};
- VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyOffsetAlignment = {};
- VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyRowPitchAlignment = {};
- VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize = {};
+ using Type = PhysicalDeviceInlineUniformBlockPropertiesEXT;
};
- static_assert( sizeof( PhysicalDeviceLimits ) == sizeof( VkPhysicalDeviceLimits ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PhysicalDeviceLimits>::value, "struct wrapper is not a standard layout!" );
struct PhysicalDeviceLineRasterizationFeaturesEXT
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ = {} ) VULKAN_HPP_NOEXCEPT
- : rectangularLines( rectangularLines_ )
- , bresenhamLines( bresenhamLines_ )
- , smoothLines( smoothLines_ )
- , stippledRectangularLines( stippledRectangularLines_ )
- , stippledBresenhamLines( stippledBresenhamLines_ )
- , stippledSmoothLines( stippledSmoothLines_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ = {}) VULKAN_HPP_NOEXCEPT
+ : rectangularLines( rectangularLines_ ), bresenhamLines( bresenhamLines_ ), smoothLines( smoothLines_ ), stippledRectangularLines( stippledRectangularLines_ ), stippledBresenhamLines( stippledBresenhamLines_ ), stippledSmoothLines( stippledSmoothLines_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT ) - offsetof( PhysicalDeviceLineRasterizationFeaturesEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeaturesEXT( PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceLineRasterizationFeaturesEXT( VkPhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceLineRasterizationFeaturesEXT& operator=( VkPhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceLineRasterizationFeaturesEXT & operator=( VkPhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT const *>( &rhs );
+ return *this;
+ }
+
+ PhysicalDeviceLineRasterizationFeaturesEXT & operator=( PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceLineRasterizationFeaturesEXT ) );
return *this;
}
@@ -45834,6 +65329,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceLineRasterizationFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceLineRasterizationFeaturesEXT*>( this );
@@ -45844,6 +65340,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceLineRasterizationFeaturesEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceLineRasterizationFeaturesEXT const& ) const = default;
+#else
bool operator==( PhysicalDeviceLineRasterizationFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -45860,6 +65360,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT;
@@ -45870,33 +65373,48 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines = {};
VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines = {};
VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines = {};
+
};
static_assert( sizeof( PhysicalDeviceLineRasterizationFeaturesEXT ) == sizeof( VkPhysicalDeviceLineRasterizationFeaturesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceLineRasterizationFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT>
+ {
+ using Type = PhysicalDeviceLineRasterizationFeaturesEXT;
+ };
+
struct PhysicalDeviceLineRasterizationPropertiesEXT
{
- PhysicalDeviceLineRasterizationPropertiesEXT( uint32_t lineSubPixelPrecisionBits_ = {} ) VULKAN_HPP_NOEXCEPT
- : lineSubPixelPrecisionBits( lineSubPixelPrecisionBits_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationPropertiesEXT(uint32_t lineSubPixelPrecisionBits_ = {}) VULKAN_HPP_NOEXCEPT
+ : lineSubPixelPrecisionBits( lineSubPixelPrecisionBits_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT ) - offsetof( PhysicalDeviceLineRasterizationPropertiesEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationPropertiesEXT( PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceLineRasterizationPropertiesEXT( VkPhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceLineRasterizationPropertiesEXT & operator=( VkPhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT const *>( &rhs );
+ return *this;
+ }
- PhysicalDeviceLineRasterizationPropertiesEXT& operator=( VkPhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceLineRasterizationPropertiesEXT & operator=( PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceLineRasterizationPropertiesEXT ) );
return *this;
}
+
operator VkPhysicalDeviceLineRasterizationPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceLineRasterizationPropertiesEXT*>( this );
@@ -45907,6 +65425,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceLineRasterizationPropertiesEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceLineRasterizationPropertiesEXT const& ) const = default;
+#else
bool operator==( PhysicalDeviceLineRasterizationPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -45918,40 +65440,56 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT;
void* pNext = {};
uint32_t lineSubPixelPrecisionBits = {};
+
};
static_assert( sizeof( PhysicalDeviceLineRasterizationPropertiesEXT ) == sizeof( VkPhysicalDeviceLineRasterizationPropertiesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceLineRasterizationPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT>
+ {
+ using Type = PhysicalDeviceLineRasterizationPropertiesEXT;
+ };
+
struct PhysicalDeviceMaintenance3Properties
{
- PhysicalDeviceMaintenance3Properties( uint32_t maxPerSetDescriptors_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {} ) VULKAN_HPP_NOEXCEPT
- : maxPerSetDescriptors( maxPerSetDescriptors_ )
- , maxMemoryAllocationSize( maxMemoryAllocationSize_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance3Properties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance3Properties(uint32_t maxPerSetDescriptors_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {}) VULKAN_HPP_NOEXCEPT
+ : maxPerSetDescriptors( maxPerSetDescriptors_ ), maxMemoryAllocationSize( maxMemoryAllocationSize_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties ) - offsetof( PhysicalDeviceMaintenance3Properties, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance3Properties( PhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMaintenance3Properties( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceMaintenance3Properties & operator=( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties const *>( &rhs );
+ return *this;
+ }
- PhysicalDeviceMaintenance3Properties& operator=( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceMaintenance3Properties & operator=( PhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceMaintenance3Properties ) );
return *this;
}
+
operator VkPhysicalDeviceMaintenance3Properties const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMaintenance3Properties*>( this );
@@ -45962,6 +65500,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceMaintenance3Properties*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceMaintenance3Properties const& ) const = default;
+#else
bool operator==( PhysicalDeviceMaintenance3Properties const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -45974,44 +65516,58 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance3Properties;
void* pNext = {};
uint32_t maxPerSetDescriptors = {};
VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize = {};
+
};
static_assert( sizeof( PhysicalDeviceMaintenance3Properties ) == sizeof( VkPhysicalDeviceMaintenance3Properties ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceMaintenance3Properties>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceMaintenance3Properties>
+ {
+ using Type = PhysicalDeviceMaintenance3Properties;
+ };
+ using PhysicalDeviceMaintenance3PropertiesKHR = PhysicalDeviceMaintenance3Properties;
+
struct PhysicalDeviceMemoryBudgetPropertiesEXT
{
- PhysicalDeviceMemoryBudgetPropertiesEXT( std::array<VULKAN_HPP_NAMESPACE::DeviceSize,VK_MAX_MEMORY_HEAPS> const& heapBudget_ = {},
- std::array<VULKAN_HPP_NAMESPACE::DeviceSize,VK_MAX_MEMORY_HEAPS> const& heapUsage_ = {} ) VULKAN_HPP_NOEXCEPT
- : heapBudget{}
- , heapUsage{}
- {
- VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<VULKAN_HPP_NAMESPACE::DeviceSize,VK_MAX_MEMORY_HEAPS,VK_MAX_MEMORY_HEAPS>::copy( heapBudget, heapBudget_ );
- VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<VULKAN_HPP_NAMESPACE::DeviceSize,VK_MAX_MEMORY_HEAPS,VK_MAX_MEMORY_HEAPS>::copy( heapUsage, heapUsage_ );
- }
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT;
- VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT ) - offsetof( PhysicalDeviceMemoryBudgetPropertiesEXT, pNext ) );
- return *this;
- }
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT(std::array<VULKAN_HPP_NAMESPACE::DeviceSize,VK_MAX_MEMORY_HEAPS> const& heapBudget_ = {}, std::array<VULKAN_HPP_NAMESPACE::DeviceSize,VK_MAX_MEMORY_HEAPS> const& heapUsage_ = {}) VULKAN_HPP_NOEXCEPT
+ : heapBudget( heapBudget_ ), heapUsage( heapUsage_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMemoryBudgetPropertiesEXT( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceMemoryBudgetPropertiesEXT & operator=( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT const *>( &rhs );
+ return *this;
+ }
- PhysicalDeviceMemoryBudgetPropertiesEXT& operator=( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceMemoryBudgetPropertiesEXT & operator=( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceMemoryBudgetPropertiesEXT ) );
return *this;
}
+
operator VkPhysicalDeviceMemoryBudgetPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMemoryBudgetPropertiesEXT*>( this );
@@ -46022,48 +65578,69 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceMemoryBudgetPropertiesEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceMemoryBudgetPropertiesEXT const& ) const = default;
+#else
bool operator==( PhysicalDeviceMemoryBudgetPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( memcmp( heapBudget, rhs.heapBudget, VK_MAX_MEMORY_HEAPS * sizeof( VULKAN_HPP_NAMESPACE::DeviceSize ) ) == 0 )
- && ( memcmp( heapUsage, rhs.heapUsage, VK_MAX_MEMORY_HEAPS * sizeof( VULKAN_HPP_NAMESPACE::DeviceSize ) ) == 0 );
+ && ( heapBudget == rhs.heapBudget )
+ && ( heapUsage == rhs.heapUsage );
}
bool operator!=( PhysicalDeviceMemoryBudgetPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT;
void* pNext = {};
- VULKAN_HPP_NAMESPACE::DeviceSize heapBudget[VK_MAX_MEMORY_HEAPS] = {};
- VULKAN_HPP_NAMESPACE::DeviceSize heapUsage[VK_MAX_MEMORY_HEAPS] = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS> heapBudget = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS> heapUsage = {};
+
};
static_assert( sizeof( PhysicalDeviceMemoryBudgetPropertiesEXT ) == sizeof( VkPhysicalDeviceMemoryBudgetPropertiesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceMemoryBudgetPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT>
+ {
+ using Type = PhysicalDeviceMemoryBudgetPropertiesEXT;
+ };
+
struct PhysicalDeviceMemoryPriorityFeaturesEXT
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryPriorityFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 memoryPriority_ = {} ) VULKAN_HPP_NOEXCEPT
- : memoryPriority( memoryPriority_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryPriorityFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 memoryPriority_ = {}) VULKAN_HPP_NOEXCEPT
+ : memoryPriority( memoryPriority_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT ) - offsetof( PhysicalDeviceMemoryPriorityFeaturesEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryPriorityFeaturesEXT( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMemoryPriorityFeaturesEXT( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceMemoryPriorityFeaturesEXT& operator=( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceMemoryPriorityFeaturesEXT & operator=( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT const *>( &rhs );
+ return *this;
+ }
+
+ PhysicalDeviceMemoryPriorityFeaturesEXT & operator=( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceMemoryPriorityFeaturesEXT ) );
return *this;
}
@@ -46079,6 +65656,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceMemoryPriorityFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMemoryPriorityFeaturesEXT*>( this );
@@ -46089,6 +65667,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceMemoryPriorityFeaturesEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceMemoryPriorityFeaturesEXT const& ) const = default;
+#else
bool operator==( PhysicalDeviceMemoryPriorityFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -46100,148 +65682,52 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 memoryPriority = {};
+
};
static_assert( sizeof( PhysicalDeviceMemoryPriorityFeaturesEXT ) == sizeof( VkPhysicalDeviceMemoryPriorityFeaturesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceMemoryPriorityFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
- struct PhysicalDeviceMemoryProperties
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT>
{
- PhysicalDeviceMemoryProperties( uint32_t memoryTypeCount_ = {},
- std::array<VULKAN_HPP_NAMESPACE::MemoryType,VK_MAX_MEMORY_TYPES> const& memoryTypes_ = {},
- uint32_t memoryHeapCount_ = {},
- std::array<VULKAN_HPP_NAMESPACE::MemoryHeap,VK_MAX_MEMORY_HEAPS> const& memoryHeaps_ = {} ) VULKAN_HPP_NOEXCEPT
- : memoryTypeCount( memoryTypeCount_ )
- , memoryTypes{}
- , memoryHeapCount( memoryHeapCount_ )
- , memoryHeaps{}
- {
- VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<VULKAN_HPP_NAMESPACE::MemoryType,VK_MAX_MEMORY_TYPES,VK_MAX_MEMORY_TYPES>::copy( memoryTypes, memoryTypes_ );
- VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<VULKAN_HPP_NAMESPACE::MemoryHeap,VK_MAX_MEMORY_HEAPS,VK_MAX_MEMORY_HEAPS>::copy( memoryHeaps, memoryHeaps_ );
- }
-
- PhysicalDeviceMemoryProperties( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
-
- PhysicalDeviceMemoryProperties& operator=( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties const *>(&rhs);
- return *this;
- }
-
- operator VkPhysicalDeviceMemoryProperties const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkPhysicalDeviceMemoryProperties*>( this );
- }
-
- operator VkPhysicalDeviceMemoryProperties &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkPhysicalDeviceMemoryProperties*>( this );
- }
-
- bool operator==( PhysicalDeviceMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( memoryTypeCount == rhs.memoryTypeCount )
- && ( memcmp( memoryTypes, rhs.memoryTypes, std::min<uint32_t>( VK_MAX_MEMORY_TYPES, memoryTypeCount ) * sizeof( VULKAN_HPP_NAMESPACE::MemoryType ) ) == 0 )
- && ( memoryHeapCount == rhs.memoryHeapCount )
- && ( memcmp( memoryHeaps, rhs.memoryHeaps, std::min<uint32_t>( VK_MAX_MEMORY_HEAPS, memoryHeapCount ) * sizeof( VULKAN_HPP_NAMESPACE::MemoryHeap ) ) == 0 );
- }
-
- bool operator!=( PhysicalDeviceMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-
- public:
- uint32_t memoryTypeCount = {};
- VULKAN_HPP_NAMESPACE::MemoryType memoryTypes[VK_MAX_MEMORY_TYPES] = {};
- uint32_t memoryHeapCount = {};
- VULKAN_HPP_NAMESPACE::MemoryHeap memoryHeaps[VK_MAX_MEMORY_HEAPS] = {};
+ using Type = PhysicalDeviceMemoryPriorityFeaturesEXT;
};
- static_assert( sizeof( PhysicalDeviceMemoryProperties ) == sizeof( VkPhysicalDeviceMemoryProperties ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PhysicalDeviceMemoryProperties>::value, "struct wrapper is not a standard layout!" );
- struct PhysicalDeviceMemoryProperties2
+ struct PhysicalDeviceMeshShaderFeaturesNV
{
- PhysicalDeviceMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties_ = {} ) VULKAN_HPP_NOEXCEPT
- : memoryProperties( memoryProperties_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMeshShaderFeaturesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 taskShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 meshShader_ = {}) VULKAN_HPP_NOEXCEPT
+ : taskShader( taskShader_ ), meshShader( meshShader_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 ) - offsetof( PhysicalDeviceMemoryProperties2, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesNV( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceMemoryProperties2( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceMeshShaderFeaturesNV( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceMemoryProperties2& operator=( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 const *>(&rhs);
- return *this;
- }
-
- operator VkPhysicalDeviceMemoryProperties2 const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkPhysicalDeviceMemoryProperties2*>( this );
- }
-
- operator VkPhysicalDeviceMemoryProperties2 &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>( this );
- }
-
- bool operator==( PhysicalDeviceMemoryProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( memoryProperties == rhs.memoryProperties );
- }
-
- bool operator!=( PhysicalDeviceMemoryProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceMeshShaderFeaturesNV & operator=( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- return !operator==( rhs );
- }
-
- public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryProperties2;
- void* pNext = {};
- VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties = {};
- };
- static_assert( sizeof( PhysicalDeviceMemoryProperties2 ) == sizeof( VkPhysicalDeviceMemoryProperties2 ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PhysicalDeviceMemoryProperties2>::value, "struct wrapper is not a standard layout!" );
-
- struct PhysicalDeviceMeshShaderFeaturesNV
- {
- VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 meshShader_ = {} ) VULKAN_HPP_NOEXCEPT
- : taskShader( taskShader_ )
- , meshShader( meshShader_ )
- {}
-
- VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV ) - offsetof( PhysicalDeviceMeshShaderFeaturesNV, pNext ) );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV const *>( &rhs );
return *this;
}
- PhysicalDeviceMeshShaderFeaturesNV( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceMeshShaderFeaturesNV & operator=( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
- }
-
- PhysicalDeviceMeshShaderFeaturesNV& operator=( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceMeshShaderFeaturesNV ) );
return *this;
}
@@ -46263,6 +65749,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceMeshShaderFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMeshShaderFeaturesNV*>( this );
@@ -46273,6 +65760,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceMeshShaderFeaturesNV*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceMeshShaderFeaturesNV const& ) const = default;
+#else
bool operator==( PhysicalDeviceMeshShaderFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -46285,66 +65776,57 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderFeaturesNV;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 taskShader = {};
VULKAN_HPP_NAMESPACE::Bool32 meshShader = {};
+
};
static_assert( sizeof( PhysicalDeviceMeshShaderFeaturesNV ) == sizeof( VkPhysicalDeviceMeshShaderFeaturesNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceMeshShaderFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceMeshShaderFeaturesNV>
+ {
+ using Type = PhysicalDeviceMeshShaderFeaturesNV;
+ };
+
struct PhysicalDeviceMeshShaderPropertiesNV
{
- PhysicalDeviceMeshShaderPropertiesNV( uint32_t maxDrawMeshTasksCount_ = {},
- uint32_t maxTaskWorkGroupInvocations_ = {},
- std::array<uint32_t,3> const& maxTaskWorkGroupSize_ = {},
- uint32_t maxTaskTotalMemorySize_ = {},
- uint32_t maxTaskOutputCount_ = {},
- uint32_t maxMeshWorkGroupInvocations_ = {},
- std::array<uint32_t,3> const& maxMeshWorkGroupSize_ = {},
- uint32_t maxMeshTotalMemorySize_ = {},
- uint32_t maxMeshOutputVertices_ = {},
- uint32_t maxMeshOutputPrimitives_ = {},
- uint32_t maxMeshMultiviewViewCount_ = {},
- uint32_t meshOutputPerVertexGranularity_ = {},
- uint32_t meshOutputPerPrimitiveGranularity_ = {} ) VULKAN_HPP_NOEXCEPT
- : maxDrawMeshTasksCount( maxDrawMeshTasksCount_ )
- , maxTaskWorkGroupInvocations( maxTaskWorkGroupInvocations_ )
- , maxTaskWorkGroupSize{}
- , maxTaskTotalMemorySize( maxTaskTotalMemorySize_ )
- , maxTaskOutputCount( maxTaskOutputCount_ )
- , maxMeshWorkGroupInvocations( maxMeshWorkGroupInvocations_ )
- , maxMeshWorkGroupSize{}
- , maxMeshTotalMemorySize( maxMeshTotalMemorySize_ )
- , maxMeshOutputVertices( maxMeshOutputVertices_ )
- , maxMeshOutputPrimitives( maxMeshOutputPrimitives_ )
- , maxMeshMultiviewViewCount( maxMeshMultiviewViewCount_ )
- , meshOutputPerVertexGranularity( meshOutputPerVertexGranularity_ )
- , meshOutputPerPrimitiveGranularity( meshOutputPerPrimitiveGranularity_ )
- {
- VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint32_t,3,3>::copy( maxTaskWorkGroupSize, maxTaskWorkGroupSize_ );
- VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint32_t,3,3>::copy( maxMeshWorkGroupSize, maxMeshWorkGroupSize_ );
- }
-
- VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV ) - offsetof( PhysicalDeviceMeshShaderPropertiesNV, pNext ) );
- return *this;
- }
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMeshShaderPropertiesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesNV(uint32_t maxDrawMeshTasksCount_ = {}, uint32_t maxTaskWorkGroupInvocations_ = {}, std::array<uint32_t,3> const& maxTaskWorkGroupSize_ = {}, uint32_t maxTaskTotalMemorySize_ = {}, uint32_t maxTaskOutputCount_ = {}, uint32_t maxMeshWorkGroupInvocations_ = {}, std::array<uint32_t,3> const& maxMeshWorkGroupSize_ = {}, uint32_t maxMeshTotalMemorySize_ = {}, uint32_t maxMeshOutputVertices_ = {}, uint32_t maxMeshOutputPrimitives_ = {}, uint32_t maxMeshMultiviewViewCount_ = {}, uint32_t meshOutputPerVertexGranularity_ = {}, uint32_t meshOutputPerPrimitiveGranularity_ = {}) VULKAN_HPP_NOEXCEPT
+ : maxDrawMeshTasksCount( maxDrawMeshTasksCount_ ), maxTaskWorkGroupInvocations( maxTaskWorkGroupInvocations_ ), maxTaskWorkGroupSize( maxTaskWorkGroupSize_ ), maxTaskTotalMemorySize( maxTaskTotalMemorySize_ ), maxTaskOutputCount( maxTaskOutputCount_ ), maxMeshWorkGroupInvocations( maxMeshWorkGroupInvocations_ ), maxMeshWorkGroupSize( maxMeshWorkGroupSize_ ), maxMeshTotalMemorySize( maxMeshTotalMemorySize_ ), maxMeshOutputVertices( maxMeshOutputVertices_ ), maxMeshOutputPrimitives( maxMeshOutputPrimitives_ ), maxMeshMultiviewViewCount( maxMeshMultiviewViewCount_ ), meshOutputPerVertexGranularity( meshOutputPerVertexGranularity_ ), meshOutputPerPrimitiveGranularity( meshOutputPerPrimitiveGranularity_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesNV( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMeshShaderPropertiesNV( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceMeshShaderPropertiesNV& operator=( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceMeshShaderPropertiesNV & operator=( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV const *>( &rhs );
return *this;
}
+ PhysicalDeviceMeshShaderPropertiesNV & operator=( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceMeshShaderPropertiesNV ) );
+ return *this;
+ }
+
+
operator VkPhysicalDeviceMeshShaderPropertiesNV const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMeshShaderPropertiesNV*>( this );
@@ -46355,17 +65837,21 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceMeshShaderPropertiesNV*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceMeshShaderPropertiesNV const& ) const = default;
+#else
bool operator==( PhysicalDeviceMeshShaderPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maxDrawMeshTasksCount == rhs.maxDrawMeshTasksCount )
&& ( maxTaskWorkGroupInvocations == rhs.maxTaskWorkGroupInvocations )
- && ( memcmp( maxTaskWorkGroupSize, rhs.maxTaskWorkGroupSize, 3 * sizeof( uint32_t ) ) == 0 )
+ && ( maxTaskWorkGroupSize == rhs.maxTaskWorkGroupSize )
&& ( maxTaskTotalMemorySize == rhs.maxTaskTotalMemorySize )
&& ( maxTaskOutputCount == rhs.maxTaskOutputCount )
&& ( maxMeshWorkGroupInvocations == rhs.maxMeshWorkGroupInvocations )
- && ( memcmp( maxMeshWorkGroupSize, rhs.maxMeshWorkGroupSize, 3 * sizeof( uint32_t ) ) == 0 )
+ && ( maxMeshWorkGroupSize == rhs.maxMeshWorkGroupSize )
&& ( maxMeshTotalMemorySize == rhs.maxMeshTotalMemorySize )
&& ( maxMeshOutputVertices == rhs.maxMeshOutputVertices )
&& ( maxMeshOutputPrimitives == rhs.maxMeshOutputPrimitives )
@@ -46378,51 +65864,64 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderPropertiesNV;
void* pNext = {};
uint32_t maxDrawMeshTasksCount = {};
uint32_t maxTaskWorkGroupInvocations = {};
- uint32_t maxTaskWorkGroupSize[3] = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxTaskWorkGroupSize = {};
uint32_t maxTaskTotalMemorySize = {};
uint32_t maxTaskOutputCount = {};
uint32_t maxMeshWorkGroupInvocations = {};
- uint32_t maxMeshWorkGroupSize[3] = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxMeshWorkGroupSize = {};
uint32_t maxMeshTotalMemorySize = {};
uint32_t maxMeshOutputVertices = {};
uint32_t maxMeshOutputPrimitives = {};
uint32_t maxMeshMultiviewViewCount = {};
uint32_t meshOutputPerVertexGranularity = {};
uint32_t meshOutputPerPrimitiveGranularity = {};
+
};
static_assert( sizeof( PhysicalDeviceMeshShaderPropertiesNV ) == sizeof( VkPhysicalDeviceMeshShaderPropertiesNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceMeshShaderPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceMeshShaderPropertiesNV>
+ {
+ using Type = PhysicalDeviceMeshShaderPropertiesNV;
+ };
+
struct PhysicalDeviceMultiviewFeatures
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewFeatures( VULKAN_HPP_NAMESPACE::Bool32 multiview_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {} ) VULKAN_HPP_NOEXCEPT
- : multiview( multiview_ )
- , multiviewGeometryShader( multiviewGeometryShader_ )
- , multiviewTessellationShader( multiviewTessellationShader_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewFeatures(VULKAN_HPP_NAMESPACE::Bool32 multiview_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {}) VULKAN_HPP_NOEXCEPT
+ : multiview( multiview_ ), multiviewGeometryShader( multiviewGeometryShader_ ), multiviewTessellationShader( multiviewTessellationShader_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures ) - offsetof( PhysicalDeviceMultiviewFeatures, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewFeatures( PhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMultiviewFeatures( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceMultiviewFeatures& operator=( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceMultiviewFeatures & operator=( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures const *>( &rhs );
+ return *this;
+ }
+
+ PhysicalDeviceMultiviewFeatures & operator=( PhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceMultiviewFeatures ) );
return *this;
}
@@ -46450,6 +65949,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceMultiviewFeatures const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMultiviewFeatures*>( this );
@@ -46460,6 +65960,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceMultiviewFeatures*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceMultiviewFeatures const& ) const = default;
+#else
bool operator==( PhysicalDeviceMultiviewFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -46473,6 +65977,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewFeatures;
@@ -46480,33 +65987,49 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Bool32 multiview = {};
VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader = {};
VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader = {};
+
};
static_assert( sizeof( PhysicalDeviceMultiviewFeatures ) == sizeof( VkPhysicalDeviceMultiviewFeatures ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceMultiviewFeatures>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceMultiviewFeatures>
+ {
+ using Type = PhysicalDeviceMultiviewFeatures;
+ };
+ using PhysicalDeviceMultiviewFeaturesKHR = PhysicalDeviceMultiviewFeatures;
+
struct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX
{
- PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( VULKAN_HPP_NAMESPACE::Bool32 perViewPositionAllComponents_ = {} ) VULKAN_HPP_NOEXCEPT
- : perViewPositionAllComponents( perViewPositionAllComponents_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX(VULKAN_HPP_NAMESPACE::Bool32 perViewPositionAllComponents_ = {}) VULKAN_HPP_NOEXCEPT
+ : perViewPositionAllComponents( perViewPositionAllComponents_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ) - offsetof( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX & operator=( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const *>( &rhs );
+ return *this;
+ }
- PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX& operator=( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX & operator=( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ) );
return *this;
}
+
operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX*>( this );
@@ -46517,6 +66040,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const& ) const = default;
+#else
bool operator==( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -46528,40 +66055,56 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 perViewPositionAllComponents = {};
+
};
static_assert( sizeof( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ) == sizeof( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX>
+ {
+ using Type = PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
+ };
+
struct PhysicalDeviceMultiviewProperties
{
- PhysicalDeviceMultiviewProperties( uint32_t maxMultiviewViewCount_ = {},
- uint32_t maxMultiviewInstanceIndex_ = {} ) VULKAN_HPP_NOEXCEPT
- : maxMultiviewViewCount( maxMultiviewViewCount_ )
- , maxMultiviewInstanceIndex( maxMultiviewInstanceIndex_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewProperties(uint32_t maxMultiviewViewCount_ = {}, uint32_t maxMultiviewInstanceIndex_ = {}) VULKAN_HPP_NOEXCEPT
+ : maxMultiviewViewCount( maxMultiviewViewCount_ ), maxMultiviewInstanceIndex( maxMultiviewInstanceIndex_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties ) - offsetof( PhysicalDeviceMultiviewProperties, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewProperties( PhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMultiviewProperties( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceMultiviewProperties& operator=( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceMultiviewProperties & operator=( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties const *>( &rhs );
return *this;
}
+ PhysicalDeviceMultiviewProperties & operator=( PhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceMultiviewProperties ) );
+ return *this;
+ }
+
+
operator VkPhysicalDeviceMultiviewProperties const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMultiviewProperties*>( this );
@@ -46572,6 +66115,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceMultiviewProperties*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceMultiviewProperties const& ) const = default;
+#else
bool operator==( PhysicalDeviceMultiviewProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -46584,45 +66131,58 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewProperties;
void* pNext = {};
uint32_t maxMultiviewViewCount = {};
uint32_t maxMultiviewInstanceIndex = {};
+
};
static_assert( sizeof( PhysicalDeviceMultiviewProperties ) == sizeof( VkPhysicalDeviceMultiviewProperties ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceMultiviewProperties>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceMultiviewProperties>
+ {
+ using Type = PhysicalDeviceMultiviewProperties;
+ };
+ using PhysicalDeviceMultiviewPropertiesKHR = PhysicalDeviceMultiviewProperties;
+
struct PhysicalDevicePCIBusInfoPropertiesEXT
{
- PhysicalDevicePCIBusInfoPropertiesEXT( uint32_t pciDomain_ = {},
- uint32_t pciBus_ = {},
- uint32_t pciDevice_ = {},
- uint32_t pciFunction_ = {} ) VULKAN_HPP_NOEXCEPT
- : pciDomain( pciDomain_ )
- , pciBus( pciBus_ )
- , pciDevice( pciDevice_ )
- , pciFunction( pciFunction_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePciBusInfoPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePCIBusInfoPropertiesEXT(uint32_t pciDomain_ = {}, uint32_t pciBus_ = {}, uint32_t pciDevice_ = {}, uint32_t pciFunction_ = {}) VULKAN_HPP_NOEXCEPT
+ : pciDomain( pciDomain_ ), pciBus( pciBus_ ), pciDevice( pciDevice_ ), pciFunction( pciFunction_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT ) - offsetof( PhysicalDevicePCIBusInfoPropertiesEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePCIBusInfoPropertiesEXT( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDevicePCIBusInfoPropertiesEXT( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDevicePCIBusInfoPropertiesEXT& operator=( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDevicePCIBusInfoPropertiesEXT & operator=( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT const *>( &rhs );
return *this;
}
+ PhysicalDevicePCIBusInfoPropertiesEXT & operator=( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDevicePCIBusInfoPropertiesEXT ) );
+ return *this;
+ }
+
+
operator VkPhysicalDevicePCIBusInfoPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevicePCIBusInfoPropertiesEXT*>( this );
@@ -46633,6 +66193,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDevicePCIBusInfoPropertiesEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDevicePCIBusInfoPropertiesEXT const& ) const = default;
+#else
bool operator==( PhysicalDevicePCIBusInfoPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -46647,6 +66211,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePciBusInfoPropertiesEXT;
@@ -46655,32 +66222,44 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t pciBus = {};
uint32_t pciDevice = {};
uint32_t pciFunction = {};
+
};
static_assert( sizeof( PhysicalDevicePCIBusInfoPropertiesEXT ) == sizeof( VkPhysicalDevicePCIBusInfoPropertiesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDevicePCIBusInfoPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDevicePciBusInfoPropertiesEXT>
+ {
+ using Type = PhysicalDevicePCIBusInfoPropertiesEXT;
+ };
+
struct PhysicalDevicePerformanceQueryFeaturesKHR
{
- VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools_ = {} ) VULKAN_HPP_NOEXCEPT
- : performanceCounterQueryPools( performanceCounterQueryPools_ )
- , performanceCounterMultipleQueryPools( performanceCounterMultipleQueryPools_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools_ = {}, VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools_ = {}) VULKAN_HPP_NOEXCEPT
+ : performanceCounterQueryPools( performanceCounterQueryPools_ ), performanceCounterMultipleQueryPools( performanceCounterMultipleQueryPools_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR & operator=( VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR ) - offsetof( PhysicalDevicePerformanceQueryFeaturesKHR, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryFeaturesKHR( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDevicePerformanceQueryFeaturesKHR( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDevicePerformanceQueryFeaturesKHR & operator=( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR const *>( &rhs );
+ return *this;
+ }
- PhysicalDevicePerformanceQueryFeaturesKHR& operator=( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDevicePerformanceQueryFeaturesKHR & operator=( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDevicePerformanceQueryFeaturesKHR ) );
return *this;
}
@@ -46702,6 +66281,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDevicePerformanceQueryFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevicePerformanceQueryFeaturesKHR*>( this );
@@ -46712,6 +66292,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDevicePerformanceQueryFeaturesKHR*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDevicePerformanceQueryFeaturesKHR const& ) const = default;
+#else
bool operator==( PhysicalDevicePerformanceQueryFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -46724,39 +66308,57 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools = {};
VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools = {};
+
};
static_assert( sizeof( PhysicalDevicePerformanceQueryFeaturesKHR ) == sizeof( VkPhysicalDevicePerformanceQueryFeaturesKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDevicePerformanceQueryFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR>
+ {
+ using Type = PhysicalDevicePerformanceQueryFeaturesKHR;
+ };
+
struct PhysicalDevicePerformanceQueryPropertiesKHR
{
- PhysicalDevicePerformanceQueryPropertiesKHR( VULKAN_HPP_NAMESPACE::Bool32 allowCommandBufferQueryCopies_ = {} ) VULKAN_HPP_NOEXCEPT
- : allowCommandBufferQueryCopies( allowCommandBufferQueryCopies_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryPropertiesKHR(VULKAN_HPP_NAMESPACE::Bool32 allowCommandBufferQueryCopies_ = {}) VULKAN_HPP_NOEXCEPT
+ : allowCommandBufferQueryCopies( allowCommandBufferQueryCopies_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR & operator=( VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR ) - offsetof( PhysicalDevicePerformanceQueryPropertiesKHR, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryPropertiesKHR( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDevicePerformanceQueryPropertiesKHR( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDevicePerformanceQueryPropertiesKHR& operator=( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDevicePerformanceQueryPropertiesKHR & operator=( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR const *>( &rhs );
return *this;
}
+ PhysicalDevicePerformanceQueryPropertiesKHR & operator=( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDevicePerformanceQueryPropertiesKHR ) );
+ return *this;
+ }
+
+
operator VkPhysicalDevicePerformanceQueryPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevicePerformanceQueryPropertiesKHR*>( this );
@@ -46767,6 +66369,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDevicePerformanceQueryPropertiesKHR*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDevicePerformanceQueryPropertiesKHR const& ) const = default;
+#else
bool operator==( PhysicalDevicePerformanceQueryPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -46778,35 +66384,139 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 allowCommandBufferQueryCopies = {};
+
};
static_assert( sizeof( PhysicalDevicePerformanceQueryPropertiesKHR ) == sizeof( VkPhysicalDevicePerformanceQueryPropertiesKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDevicePerformanceQueryPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
- struct PhysicalDevicePipelineExecutablePropertiesFeaturesKHR
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR>
{
- VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo_ = {} ) VULKAN_HPP_NOEXCEPT
- : pipelineExecutableInfo( pipelineExecutableInfo_ )
+ using Type = PhysicalDevicePerformanceQueryPropertiesKHR;
+ };
+
+ struct PhysicalDevicePipelineCreationCacheControlFeaturesEXT
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineCreationCacheControlFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineCreationCacheControlFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ = {}) VULKAN_HPP_NOEXCEPT
+ : pipelineCreationCacheControl( pipelineCreationCacheControl_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & operator=( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineCreationCacheControlFeaturesEXT( PhysicalDevicePipelineCreationCacheControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDevicePipelineCreationCacheControlFeaturesEXT( VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDevicePipelineCreationCacheControlFeaturesEXT & operator=( VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR ) - offsetof( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR, pNext ) );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeaturesEXT const *>( &rhs );
return *this;
}
+ PhysicalDevicePipelineCreationCacheControlFeaturesEXT & operator=( PhysicalDevicePipelineCreationCacheControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDevicePipelineCreationCacheControlFeaturesEXT ) );
+ return *this;
+ }
+
+ PhysicalDevicePipelineCreationCacheControlFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PhysicalDevicePipelineCreationCacheControlFeaturesEXT & setPipelineCreationCacheControl( VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pipelineCreationCacheControl = pipelineCreationCacheControl_;
+ return *this;
+ }
+
+
+ operator VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT*>( this );
+ }
+
+ operator VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDevicePipelineCreationCacheControlFeaturesEXT const& ) const = default;
+#else
+ bool operator==( PhysicalDevicePipelineCreationCacheControlFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( pipelineCreationCacheControl == rhs.pipelineCreationCacheControl );
+ }
+
+ bool operator!=( PhysicalDevicePipelineCreationCacheControlFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineCreationCacheControlFeaturesEXT;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl = {};
+
+ };
+ static_assert( sizeof( PhysicalDevicePipelineCreationCacheControlFeaturesEXT ) == sizeof( VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDevicePipelineCreationCacheControlFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDevicePipelineCreationCacheControlFeaturesEXT>
+ {
+ using Type = PhysicalDevicePipelineCreationCacheControlFeaturesEXT;
+ };
+
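The generated setters return *this, so an extension feature struct can be configured fluently and hung off DeviceCreateInfo::pNext. A hedged sketch, assuming queueCreateInfo is prepared elsewhere and that VK_EXT_pipeline_creation_cache_control is supported and added to the create info's enabled extensions (omitted here):

#include <vulkan/vulkan.hpp>

vk::Device createDeviceWithCacheControl( vk::PhysicalDevice physicalDevice,
                                         vk::DeviceQueueCreateInfo const & queueCreateInfo )
{
  // Fluent setter chain on the wrapper struct; sType is fixed by the struct itself.
  auto cacheControl = vk::PhysicalDevicePipelineCreationCacheControlFeaturesEXT{}
                          .setPipelineCreationCacheControl( VK_TRUE );

  vk::DeviceCreateInfo createInfo{};
  createInfo.setQueueCreateInfoCount( 1 )
            .setPQueueCreateInfos( &queueCreateInfo )
            .setPNext( &cacheControl );  // extension struct rides on the pNext chain

  return physicalDevice.createDevice( createInfo );
}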
+ struct PhysicalDevicePipelineExecutablePropertiesFeaturesKHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineExecutablePropertiesFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo_ = {}) VULKAN_HPP_NOEXCEPT
+ : pipelineExecutableInfo( pipelineExecutableInfo_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & operator=( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const *>( &rhs );
+ return *this;
+ }
- PhysicalDevicePipelineExecutablePropertiesFeaturesKHR& operator=( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & operator=( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR ) );
return *this;
}
@@ -46822,6 +66532,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR*>( this );
@@ -46832,6 +66543,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const& ) const = default;
+#else
bool operator==( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -46843,38 +66558,56 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo = {};
+
};
static_assert( sizeof( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR ) == sizeof( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDevicePipelineExecutablePropertiesFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR>
+ {
+ using Type = PhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
+ };
+
struct PhysicalDevicePointClippingProperties
{
- PhysicalDevicePointClippingProperties( VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes ) VULKAN_HPP_NOEXCEPT
- : pointClippingBehavior( pointClippingBehavior_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePointClippingProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePointClippingProperties(VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes) VULKAN_HPP_NOEXCEPT
+ : pointClippingBehavior( pointClippingBehavior_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties ) - offsetof( PhysicalDevicePointClippingProperties, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePointClippingProperties( PhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDevicePointClippingProperties( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDevicePointClippingProperties& operator=( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDevicePointClippingProperties & operator=( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties const *>( &rhs );
return *this;
}
+ PhysicalDevicePointClippingProperties & operator=( PhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDevicePointClippingProperties ) );
+ return *this;
+ }
+
+
operator VkPhysicalDevicePointClippingProperties const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevicePointClippingProperties*>( this );
@@ -46885,6 +66618,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDevicePointClippingProperties*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDevicePointClippingProperties const& ) const = default;
+#else
bool operator==( PhysicalDevicePointClippingProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -46896,225 +66633,430 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePointClippingProperties;
void* pNext = {};
VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes;
+
};
static_assert( sizeof( PhysicalDevicePointClippingProperties ) == sizeof( VkPhysicalDevicePointClippingProperties ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDevicePointClippingProperties>::value, "struct wrapper is not a standard layout!" );
- struct PhysicalDeviceSparseProperties
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDevicePointClippingProperties>
+ {
+ using Type = PhysicalDevicePointClippingProperties;
+ };
+ using PhysicalDevicePointClippingPropertiesKHR = PhysicalDevicePointClippingProperties;
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ struct PhysicalDevicePortabilitySubsetFeaturesKHR
{
- PhysicalDeviceSparseProperties( VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict_ = {} ) VULKAN_HPP_NOEXCEPT
- : residencyStandard2DBlockShape( residencyStandard2DBlockShape_ )
- , residencyStandard2DMultisampleBlockShape( residencyStandard2DMultisampleBlockShape_ )
- , residencyStandard3DBlockShape( residencyStandard3DBlockShape_ )
- , residencyAlignedMipSize( residencyAlignedMipSize_ )
- , residencyNonResidentStrict( residencyNonResidentStrict_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors_ = {}, VULKAN_HPP_NAMESPACE::Bool32 events_ = {}, VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation_ = {}, VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatSwizzle_ = {}, VULKAN_HPP_NAMESPACE::Bool32 imageView2DOn3DImage_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multisampleArrayImage_ = {}, VULKAN_HPP_NAMESPACE::Bool32 mutableComparisonSamplers_ = {}, VULKAN_HPP_NAMESPACE::Bool32 pointPolygons_ = {}, VULKAN_HPP_NAMESPACE::Bool32 samplerMipLodBias_ = {}, VULKAN_HPP_NAMESPACE::Bool32 separateStencilMaskRef_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSampleRateInterpolationFunctions_ = {}, VULKAN_HPP_NAMESPACE::Bool32 tessellationIsolines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 tessellationPointMode_ = {}, VULKAN_HPP_NAMESPACE::Bool32 triangleFans_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride_ = {}) VULKAN_HPP_NOEXCEPT
+ : constantAlphaColorBlendFactors( constantAlphaColorBlendFactors_ ), events( events_ ), imageViewFormatReinterpretation( imageViewFormatReinterpretation_ ), imageViewFormatSwizzle( imageViewFormatSwizzle_ ), imageView2DOn3DImage( imageView2DOn3DImage_ ), multisampleArrayImage( multisampleArrayImage_ ), mutableComparisonSamplers( mutableComparisonSamplers_ ), pointPolygons( pointPolygons_ ), samplerMipLodBias( samplerMipLodBias_ ), separateStencilMaskRef( separateStencilMaskRef_ ), shaderSampleRateInterpolationFunctions( shaderSampleRateInterpolationFunctions_ ), tessellationIsolines( tessellationIsolines_ ), tessellationPointMode( tessellationPointMode_ ), triangleFans( triangleFans_ ), vertexAttributeAccessBeyondStride( vertexAttributeAccessBeyondStride_ )
{}
- PhysicalDeviceSparseProperties( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetFeaturesKHR( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDevicePortabilitySubsetFeaturesKHR( VkPhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceSparseProperties& operator=( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDevicePortabilitySubsetFeaturesKHR & operator=( VkPhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetFeaturesKHR const *>( &rhs );
return *this;
}
- operator VkPhysicalDeviceSparseProperties const&() const VULKAN_HPP_NOEXCEPT
+ PhysicalDevicePortabilitySubsetFeaturesKHR & operator=( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPhysicalDeviceSparseProperties*>( this );
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDevicePortabilitySubsetFeaturesKHR ) );
+ return *this;
}
- operator VkPhysicalDeviceSparseProperties &() VULKAN_HPP_NOEXCEPT
+ PhysicalDevicePortabilitySubsetFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPhysicalDeviceSparseProperties*>( this );
+ pNext = pNext_;
+ return *this;
}
- bool operator==( PhysicalDeviceSparseProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+ PhysicalDevicePortabilitySubsetFeaturesKHR & setConstantAlphaColorBlendFactors( VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors_ ) VULKAN_HPP_NOEXCEPT
{
- return ( residencyStandard2DBlockShape == rhs.residencyStandard2DBlockShape )
- && ( residencyStandard2DMultisampleBlockShape == rhs.residencyStandard2DMultisampleBlockShape )
- && ( residencyStandard3DBlockShape == rhs.residencyStandard3DBlockShape )
- && ( residencyAlignedMipSize == rhs.residencyAlignedMipSize )
- && ( residencyNonResidentStrict == rhs.residencyNonResidentStrict );
+ constantAlphaColorBlendFactors = constantAlphaColorBlendFactors_;
+ return *this;
}
- bool operator!=( PhysicalDeviceSparseProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+ PhysicalDevicePortabilitySubsetFeaturesKHR & setEvents( VULKAN_HPP_NAMESPACE::Bool32 events_ ) VULKAN_HPP_NOEXCEPT
+ {
+ events = events_;
+ return *this;
+ }
+
+ PhysicalDevicePortabilitySubsetFeaturesKHR & setImageViewFormatReinterpretation( VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation_ ) VULKAN_HPP_NOEXCEPT
+ {
+ imageViewFormatReinterpretation = imageViewFormatReinterpretation_;
+ return *this;
+ }
+
+ PhysicalDevicePortabilitySubsetFeaturesKHR & setImageViewFormatSwizzle( VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatSwizzle_ ) VULKAN_HPP_NOEXCEPT
+ {
+ imageViewFormatSwizzle = imageViewFormatSwizzle_;
+ return *this;
+ }
+
+ PhysicalDevicePortabilitySubsetFeaturesKHR & setImageView2DOn3DImage( VULKAN_HPP_NAMESPACE::Bool32 imageView2DOn3DImage_ ) VULKAN_HPP_NOEXCEPT
+ {
+ imageView2DOn3DImage = imageView2DOn3DImage_;
+ return *this;
+ }
+
+ PhysicalDevicePortabilitySubsetFeaturesKHR & setMultisampleArrayImage( VULKAN_HPP_NAMESPACE::Bool32 multisampleArrayImage_ ) VULKAN_HPP_NOEXCEPT
+ {
+ multisampleArrayImage = multisampleArrayImage_;
+ return *this;
+ }
+
+ PhysicalDevicePortabilitySubsetFeaturesKHR & setMutableComparisonSamplers( VULKAN_HPP_NAMESPACE::Bool32 mutableComparisonSamplers_ ) VULKAN_HPP_NOEXCEPT
+ {
+ mutableComparisonSamplers = mutableComparisonSamplers_;
+ return *this;
+ }
+
+ PhysicalDevicePortabilitySubsetFeaturesKHR & setPointPolygons( VULKAN_HPP_NAMESPACE::Bool32 pointPolygons_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pointPolygons = pointPolygons_;
+ return *this;
+ }
+
+ PhysicalDevicePortabilitySubsetFeaturesKHR & setSamplerMipLodBias( VULKAN_HPP_NAMESPACE::Bool32 samplerMipLodBias_ ) VULKAN_HPP_NOEXCEPT
+ {
+ samplerMipLodBias = samplerMipLodBias_;
+ return *this;
+ }
+
+ PhysicalDevicePortabilitySubsetFeaturesKHR & setSeparateStencilMaskRef( VULKAN_HPP_NAMESPACE::Bool32 separateStencilMaskRef_ ) VULKAN_HPP_NOEXCEPT
+ {
+ separateStencilMaskRef = separateStencilMaskRef_;
+ return *this;
+ }
+
+ PhysicalDevicePortabilitySubsetFeaturesKHR & setShaderSampleRateInterpolationFunctions( VULKAN_HPP_NAMESPACE::Bool32 shaderSampleRateInterpolationFunctions_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderSampleRateInterpolationFunctions = shaderSampleRateInterpolationFunctions_;
+ return *this;
+ }
+
+ PhysicalDevicePortabilitySubsetFeaturesKHR & setTessellationIsolines( VULKAN_HPP_NAMESPACE::Bool32 tessellationIsolines_ ) VULKAN_HPP_NOEXCEPT
+ {
+ tessellationIsolines = tessellationIsolines_;
+ return *this;
+ }
+
+ PhysicalDevicePortabilitySubsetFeaturesKHR & setTessellationPointMode( VULKAN_HPP_NAMESPACE::Bool32 tessellationPointMode_ ) VULKAN_HPP_NOEXCEPT
+ {
+ tessellationPointMode = tessellationPointMode_;
+ return *this;
+ }
+
+ PhysicalDevicePortabilitySubsetFeaturesKHR & setTriangleFans( VULKAN_HPP_NAMESPACE::Bool32 triangleFans_ ) VULKAN_HPP_NOEXCEPT
+ {
+ triangleFans = triangleFans_;
+ return *this;
+ }
+
+ PhysicalDevicePortabilitySubsetFeaturesKHR & setVertexAttributeAccessBeyondStride( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride_ ) VULKAN_HPP_NOEXCEPT
+ {
+ vertexAttributeAccessBeyondStride = vertexAttributeAccessBeyondStride_;
+ return *this;
+ }
+
+
+ operator VkPhysicalDevicePortabilitySubsetFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDevicePortabilitySubsetFeaturesKHR*>( this );
+ }
+
+ operator VkPhysicalDevicePortabilitySubsetFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDevicePortabilitySubsetFeaturesKHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDevicePortabilitySubsetFeaturesKHR const& ) const = default;
+#else
+ bool operator==( PhysicalDevicePortabilitySubsetFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( constantAlphaColorBlendFactors == rhs.constantAlphaColorBlendFactors )
+ && ( events == rhs.events )
+ && ( imageViewFormatReinterpretation == rhs.imageViewFormatReinterpretation )
+ && ( imageViewFormatSwizzle == rhs.imageViewFormatSwizzle )
+ && ( imageView2DOn3DImage == rhs.imageView2DOn3DImage )
+ && ( multisampleArrayImage == rhs.multisampleArrayImage )
+ && ( mutableComparisonSamplers == rhs.mutableComparisonSamplers )
+ && ( pointPolygons == rhs.pointPolygons )
+ && ( samplerMipLodBias == rhs.samplerMipLodBias )
+ && ( separateStencilMaskRef == rhs.separateStencilMaskRef )
+ && ( shaderSampleRateInterpolationFunctions == rhs.shaderSampleRateInterpolationFunctions )
+ && ( tessellationIsolines == rhs.tessellationIsolines )
+ && ( tessellationPointMode == rhs.tessellationPointMode )
+ && ( triangleFans == rhs.triangleFans )
+ && ( vertexAttributeAccessBeyondStride == rhs.vertexAttributeAccessBeyondStride );
+ }
+
+ bool operator!=( PhysicalDevicePortabilitySubsetFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape = {};
- VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape = {};
- VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape = {};
- VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize = {};
- VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict = {};
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors = {};
+ VULKAN_HPP_NAMESPACE::Bool32 events = {};
+ VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation = {};
+ VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatSwizzle = {};
+ VULKAN_HPP_NAMESPACE::Bool32 imageView2DOn3DImage = {};
+ VULKAN_HPP_NAMESPACE::Bool32 multisampleArrayImage = {};
+ VULKAN_HPP_NAMESPACE::Bool32 mutableComparisonSamplers = {};
+ VULKAN_HPP_NAMESPACE::Bool32 pointPolygons = {};
+ VULKAN_HPP_NAMESPACE::Bool32 samplerMipLodBias = {};
+ VULKAN_HPP_NAMESPACE::Bool32 separateStencilMaskRef = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSampleRateInterpolationFunctions = {};
+ VULKAN_HPP_NAMESPACE::Bool32 tessellationIsolines = {};
+ VULKAN_HPP_NAMESPACE::Bool32 tessellationPointMode = {};
+ VULKAN_HPP_NAMESPACE::Bool32 triangleFans = {};
+ VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride = {};
+
+ };
+ static_assert( sizeof( PhysicalDevicePortabilitySubsetFeaturesKHR ) == sizeof( VkPhysicalDevicePortabilitySubsetFeaturesKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDevicePortabilitySubsetFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR>
+ {
+ using Type = PhysicalDevicePortabilitySubsetFeaturesKHR;
};
- static_assert( sizeof( PhysicalDeviceSparseProperties ) == sizeof( VkPhysicalDeviceSparseProperties ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PhysicalDeviceSparseProperties>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- struct PhysicalDeviceProperties
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ struct PhysicalDevicePortabilitySubsetPropertiesKHR
{
- PhysicalDeviceProperties( uint32_t apiVersion_ = {},
- uint32_t driverVersion_ = {},
- uint32_t vendorID_ = {},
- uint32_t deviceID_ = {},
- VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType_ = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther,
- std::array<char,VK_MAX_PHYSICAL_DEVICE_NAME_SIZE> const& deviceName_ = {},
- std::array<uint8_t,VK_UUID_SIZE> const& pipelineCacheUUID_ = {},
- VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits_ = {},
- VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties_ = {} ) VULKAN_HPP_NOEXCEPT
- : apiVersion( apiVersion_ )
- , driverVersion( driverVersion_ )
- , vendorID( vendorID_ )
- , deviceID( deviceID_ )
- , deviceType( deviceType_ )
- , deviceName{}
- , pipelineCacheUUID{}
- , limits( limits_ )
- , sparseProperties( sparseProperties_ )
- {
- VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_PHYSICAL_DEVICE_NAME_SIZE,VK_MAX_PHYSICAL_DEVICE_NAME_SIZE>::copy( deviceName, deviceName_ );
- VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint8_t,VK_UUID_SIZE,VK_UUID_SIZE>::copy( pipelineCacheUUID, pipelineCacheUUID_ );
- }
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR;
- PhysicalDeviceProperties( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetPropertiesKHR(uint32_t minVertexInputBindingStrideAlignment_ = {}) VULKAN_HPP_NOEXCEPT
+ : minVertexInputBindingStrideAlignment( minVertexInputBindingStrideAlignment_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetPropertiesKHR( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDevicePortabilitySubsetPropertiesKHR( VkPhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceProperties& operator=( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDevicePortabilitySubsetPropertiesKHR & operator=( VkPhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetPropertiesKHR const *>( &rhs );
return *this;
}
- operator VkPhysicalDeviceProperties const&() const VULKAN_HPP_NOEXCEPT
+ PhysicalDevicePortabilitySubsetPropertiesKHR & operator=( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPhysicalDeviceProperties*>( this );
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDevicePortabilitySubsetPropertiesKHR ) );
+ return *this;
}
- operator VkPhysicalDeviceProperties &() VULKAN_HPP_NOEXCEPT
+ PhysicalDevicePortabilitySubsetPropertiesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPhysicalDeviceProperties*>( this );
+ pNext = pNext_;
+ return *this;
}
- bool operator==( PhysicalDeviceProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+ PhysicalDevicePortabilitySubsetPropertiesKHR & setMinVertexInputBindingStrideAlignment( uint32_t minVertexInputBindingStrideAlignment_ ) VULKAN_HPP_NOEXCEPT
{
- return ( apiVersion == rhs.apiVersion )
- && ( driverVersion == rhs.driverVersion )
- && ( vendorID == rhs.vendorID )
- && ( deviceID == rhs.deviceID )
- && ( deviceType == rhs.deviceType )
- && ( memcmp( deviceName, rhs.deviceName, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE * sizeof( char ) ) == 0 )
- && ( memcmp( pipelineCacheUUID, rhs.pipelineCacheUUID, VK_UUID_SIZE * sizeof( uint8_t ) ) == 0 )
- && ( limits == rhs.limits )
- && ( sparseProperties == rhs.sparseProperties );
+ minVertexInputBindingStrideAlignment = minVertexInputBindingStrideAlignment_;
+ return *this;
}
- bool operator!=( PhysicalDeviceProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+ operator VkPhysicalDevicePortabilitySubsetPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDevicePortabilitySubsetPropertiesKHR*>( this );
+ }
+
+ operator VkPhysicalDevicePortabilitySubsetPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDevicePortabilitySubsetPropertiesKHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDevicePortabilitySubsetPropertiesKHR const& ) const = default;
+#else
+ bool operator==( PhysicalDevicePortabilitySubsetPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( minVertexInputBindingStrideAlignment == rhs.minVertexInputBindingStrideAlignment );
+ }
+
+ bool operator!=( PhysicalDevicePortabilitySubsetPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- uint32_t apiVersion = {};
- uint32_t driverVersion = {};
- uint32_t vendorID = {};
- uint32_t deviceID = {};
- VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther;
- char deviceName[VK_MAX_PHYSICAL_DEVICE_NAME_SIZE] = {};
- uint8_t pipelineCacheUUID[VK_UUID_SIZE] = {};
- VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits = {};
- VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties = {};
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR;
+ void* pNext = {};
+ uint32_t minVertexInputBindingStrideAlignment = {};
+
};
- static_assert( sizeof( PhysicalDeviceProperties ) == sizeof( VkPhysicalDeviceProperties ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PhysicalDeviceProperties>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( PhysicalDevicePortabilitySubsetPropertiesKHR ) == sizeof( VkPhysicalDevicePortabilitySubsetPropertiesKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDevicePortabilitySubsetPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
- struct PhysicalDeviceProperties2
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR>
{
- PhysicalDeviceProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties_ = {} ) VULKAN_HPP_NOEXCEPT
- : properties( properties_ )
+ using Type = PhysicalDevicePortabilitySubsetPropertiesKHR;
+ };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
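Because the portability-subset structs above only exist when VK_ENABLE_BETA_EXTENSIONS is defined, a usage sketch has to sit behind the same guard. A minimal, hedged example, assuming a portability-class driver that exposes VK_KHR_portability_subset:

#include <vulkan/vulkan.hpp>

#if defined( VK_ENABLE_BETA_EXTENSIONS )
// Checks one representative portability limitation: native triangle-fan support.
bool deviceSupportsTriangleFans( vk::PhysicalDevice physicalDevice )
{
  auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
                                           vk::PhysicalDevicePortabilitySubsetFeaturesKHR>();
  return chain.get<vk::PhysicalDevicePortabilitySubsetFeaturesKHR>().triangleFans == VK_TRUE;
}
#endif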
+ struct PhysicalDevicePrivateDataFeaturesEXT
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePrivateDataFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePrivateDataFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 privateData_ = {}) VULKAN_HPP_NOEXCEPT
+ : privateData( privateData_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePrivateDataFeaturesEXT( PhysicalDevicePrivateDataFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDevicePrivateDataFeaturesEXT( VkPhysicalDevicePrivateDataFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDevicePrivateDataFeaturesEXT & operator=( VkPhysicalDevicePrivateDataFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 ) - offsetof( PhysicalDeviceProperties2, pNext ) );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeaturesEXT const *>( &rhs );
return *this;
}
- PhysicalDeviceProperties2( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDevicePrivateDataFeaturesEXT & operator=( PhysicalDevicePrivateDataFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDevicePrivateDataFeaturesEXT ) );
+ return *this;
}
- PhysicalDeviceProperties2& operator=( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDevicePrivateDataFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 const *>(&rhs);
+ pNext = pNext_;
return *this;
}
- operator VkPhysicalDeviceProperties2 const&() const VULKAN_HPP_NOEXCEPT
+ PhysicalDevicePrivateDataFeaturesEXT & setPrivateData( VULKAN_HPP_NAMESPACE::Bool32 privateData_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPhysicalDeviceProperties2*>( this );
+ privateData = privateData_;
+ return *this;
}
- operator VkPhysicalDeviceProperties2 &() VULKAN_HPP_NOEXCEPT
+
+ operator VkPhysicalDevicePrivateDataFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPhysicalDeviceProperties2*>( this );
+ return *reinterpret_cast<const VkPhysicalDevicePrivateDataFeaturesEXT*>( this );
}
- bool operator==( PhysicalDeviceProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDevicePrivateDataFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDevicePrivateDataFeaturesEXT*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDevicePrivateDataFeaturesEXT const& ) const = default;
+#else
+ bool operator==( PhysicalDevicePrivateDataFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( properties == rhs.properties );
+ && ( privateData == rhs.privateData );
}
- bool operator!=( PhysicalDeviceProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PhysicalDevicePrivateDataFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProperties2;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePrivateDataFeaturesEXT;
void* pNext = {};
- VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties = {};
+ VULKAN_HPP_NAMESPACE::Bool32 privateData = {};
+
+ };
+ static_assert( sizeof( PhysicalDevicePrivateDataFeaturesEXT ) == sizeof( VkPhysicalDevicePrivateDataFeaturesEXT ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDevicePrivateDataFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDevicePrivateDataFeaturesEXT>
+ {
+ using Type = PhysicalDevicePrivateDataFeaturesEXT;
};
- static_assert( sizeof( PhysicalDeviceProperties2 ) == sizeof( VkPhysicalDeviceProperties2 ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PhysicalDeviceProperties2>::value, "struct wrapper is not a standard layout!" );
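The same query can be written without a StructureChain by wiring pNext by hand, which is all the chain does under the hood. A sketch using PhysicalDevicePrivateDataFeaturesEXT above, assuming a Vulkan 1.1+ physical device:

#include <vulkan/vulkan.hpp>

bool supportsPrivateData( vk::PhysicalDevice physicalDevice )
{
  vk::PhysicalDevicePrivateDataFeaturesEXT privateData{};
  vk::PhysicalDeviceFeatures2 features2{};
  features2.pNext = &privateData;            // manual pNext chain, filled in by the driver
  physicalDevice.getFeatures2( &features2 );
  return privateData.privateData == VK_TRUE;
}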
struct PhysicalDeviceProtectedMemoryFeatures
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryFeatures( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {} ) VULKAN_HPP_NOEXCEPT
- : protectedMemory( protectedMemory_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProtectedMemoryFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryFeatures(VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {}) VULKAN_HPP_NOEXCEPT
+ : protectedMemory( protectedMemory_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures ) - offsetof( PhysicalDeviceProtectedMemoryFeatures, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryFeatures( PhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceProtectedMemoryFeatures( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceProtectedMemoryFeatures& operator=( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceProtectedMemoryFeatures & operator=( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures const *>( &rhs );
+ return *this;
+ }
+
+ PhysicalDeviceProtectedMemoryFeatures & operator=( PhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceProtectedMemoryFeatures ) );
return *this;
}
@@ -47130,6 +67072,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceProtectedMemoryFeatures const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceProtectedMemoryFeatures*>( this );
@@ -47140,6 +67083,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceProtectedMemoryFeatures*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceProtectedMemoryFeatures const& ) const = default;
+#else
bool operator==( PhysicalDeviceProtectedMemoryFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -47151,38 +67098,56 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryFeatures;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 protectedMemory = {};
+
};
static_assert( sizeof( PhysicalDeviceProtectedMemoryFeatures ) == sizeof( VkPhysicalDeviceProtectedMemoryFeatures ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceProtectedMemoryFeatures>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceProtectedMemoryFeatures>
+ {
+ using Type = PhysicalDeviceProtectedMemoryFeatures;
+ };
+
struct PhysicalDeviceProtectedMemoryProperties
{
- PhysicalDeviceProtectedMemoryProperties( VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = {} ) VULKAN_HPP_NOEXCEPT
- : protectedNoFault( protectedNoFault_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProtectedMemoryProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryProperties(VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = {}) VULKAN_HPP_NOEXCEPT
+ : protectedNoFault( protectedNoFault_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties ) - offsetof( PhysicalDeviceProtectedMemoryProperties, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryProperties( PhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceProtectedMemoryProperties( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceProtectedMemoryProperties& operator=( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceProtectedMemoryProperties & operator=( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties const *>( &rhs );
return *this;
}
+ PhysicalDeviceProtectedMemoryProperties & operator=( PhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceProtectedMemoryProperties ) );
+ return *this;
+ }
+
+
operator VkPhysicalDeviceProtectedMemoryProperties const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceProtectedMemoryProperties*>( this );
@@ -47193,6 +67158,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceProtectedMemoryProperties*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceProtectedMemoryProperties const& ) const = default;
+#else
bool operator==( PhysicalDeviceProtectedMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -47204,38 +67173,56 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryProperties;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault = {};
+
};
static_assert( sizeof( PhysicalDeviceProtectedMemoryProperties ) == sizeof( VkPhysicalDeviceProtectedMemoryProperties ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceProtectedMemoryProperties>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceProtectedMemoryProperties>
+ {
+ using Type = PhysicalDeviceProtectedMemoryProperties;
+ };
+
struct PhysicalDevicePushDescriptorPropertiesKHR
{
- PhysicalDevicePushDescriptorPropertiesKHR( uint32_t maxPushDescriptors_ = {} ) VULKAN_HPP_NOEXCEPT
- : maxPushDescriptors( maxPushDescriptors_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePushDescriptorPropertiesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePushDescriptorPropertiesKHR(uint32_t maxPushDescriptors_ = {}) VULKAN_HPP_NOEXCEPT
+ : maxPushDescriptors( maxPushDescriptors_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR & operator=( VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR ) - offsetof( PhysicalDevicePushDescriptorPropertiesKHR, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePushDescriptorPropertiesKHR( PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDevicePushDescriptorPropertiesKHR( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDevicePushDescriptorPropertiesKHR & operator=( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR const *>( &rhs );
+ return *this;
+ }
- PhysicalDevicePushDescriptorPropertiesKHR& operator=( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDevicePushDescriptorPropertiesKHR & operator=( PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDevicePushDescriptorPropertiesKHR ) );
return *this;
}
+
operator VkPhysicalDevicePushDescriptorPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevicePushDescriptorPropertiesKHR*>( this );
@@ -47246,6 +67233,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDevicePushDescriptorPropertiesKHR*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDevicePushDescriptorPropertiesKHR const& ) const = default;
+#else
bool operator==( PhysicalDevicePushDescriptorPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -47257,52 +67248,302 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePushDescriptorPropertiesKHR;
void* pNext = {};
uint32_t maxPushDescriptors = {};
+
};
static_assert( sizeof( PhysicalDevicePushDescriptorPropertiesKHR ) == sizeof( VkPhysicalDevicePushDescriptorPropertiesKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDevicePushDescriptorPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
- struct PhysicalDeviceRayTracingPropertiesNV
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDevicePushDescriptorPropertiesKHR>
+ {
+ using Type = PhysicalDevicePushDescriptorPropertiesKHR;
+ };
+
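Properties-only structs such as PhysicalDevicePushDescriptorPropertiesKHR (no setters are generated for them) follow the same pattern through getProperties2. A hedged sketch reading the push-descriptor limit, assuming VK_KHR_push_descriptor is available:

#include <vulkan/vulkan.hpp>

uint32_t queryMaxPushDescriptors( vk::PhysicalDevice physicalDevice )
{
  auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
                                             vk::PhysicalDevicePushDescriptorPropertiesKHR>();
  return chain.get<vk::PhysicalDevicePushDescriptorPropertiesKHR>().maxPushDescriptors;
}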
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ struct PhysicalDeviceRayTracingFeaturesKHR
{
- PhysicalDeviceRayTracingPropertiesNV( uint32_t shaderGroupHandleSize_ = {},
- uint32_t maxRecursionDepth_ = {},
- uint32_t maxShaderGroupStride_ = {},
- uint32_t shaderGroupBaseAlignment_ = {},
- uint64_t maxGeometryCount_ = {},
- uint64_t maxInstanceCount_ = {},
- uint64_t maxTriangleCount_ = {},
- uint32_t maxDescriptorSetAccelerationStructures_ = {} ) VULKAN_HPP_NOEXCEPT
- : shaderGroupHandleSize( shaderGroupHandleSize_ )
- , maxRecursionDepth( maxRecursionDepth_ )
- , maxShaderGroupStride( maxShaderGroupStride_ )
- , shaderGroupBaseAlignment( shaderGroupBaseAlignment_ )
- , maxGeometryCount( maxGeometryCount_ )
- , maxInstanceCount( maxInstanceCount_ )
- , maxTriangleCount( maxTriangleCount_ )
- , maxDescriptorSetAccelerationStructures( maxDescriptorSetAccelerationStructures_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingFeaturesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 rayTracing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayTracingShaderGroupHandleCaptureReplay_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayTracingShaderGroupHandleCaptureReplayMixed_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayTracingAccelerationStructureCaptureReplay_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayTracingIndirectTraceRays_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayTracingIndirectAccelerationStructureBuild_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayTracingHostAccelerationStructureCommands_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayQuery_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayTracingPrimitiveCulling_ = {}) VULKAN_HPP_NOEXCEPT
+ : rayTracing( rayTracing_ ), rayTracingShaderGroupHandleCaptureReplay( rayTracingShaderGroupHandleCaptureReplay_ ), rayTracingShaderGroupHandleCaptureReplayMixed( rayTracingShaderGroupHandleCaptureReplayMixed_ ), rayTracingAccelerationStructureCaptureReplay( rayTracingAccelerationStructureCaptureReplay_ ), rayTracingIndirectTraceRays( rayTracingIndirectTraceRays_ ), rayTracingIndirectAccelerationStructureBuild( rayTracingIndirectAccelerationStructureBuild_ ), rayTracingHostAccelerationStructureCommands( rayTracingHostAccelerationStructureCommands_ ), rayQuery( rayQuery_ ), rayTracingPrimitiveCulling( rayTracingPrimitiveCulling_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingFeaturesKHR( PhysicalDeviceRayTracingFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceRayTracingFeaturesKHR( VkPhysicalDeviceRayTracingFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceRayTracingFeaturesKHR & operator=( VkPhysicalDeviceRayTracingFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingFeaturesKHR const *>( &rhs );
+ return *this;
+ }
+
+ PhysicalDeviceRayTracingFeaturesKHR & operator=( PhysicalDeviceRayTracingFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceRayTracingFeaturesKHR ) );
+ return *this;
+ }
+
+ PhysicalDeviceRayTracingFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PhysicalDeviceRayTracingFeaturesKHR & setRayTracing( VULKAN_HPP_NAMESPACE::Bool32 rayTracing_ ) VULKAN_HPP_NOEXCEPT
+ {
+ rayTracing = rayTracing_;
+ return *this;
+ }
+
+ PhysicalDeviceRayTracingFeaturesKHR & setRayTracingShaderGroupHandleCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 rayTracingShaderGroupHandleCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
+ {
+ rayTracingShaderGroupHandleCaptureReplay = rayTracingShaderGroupHandleCaptureReplay_;
+ return *this;
+ }
+
+ PhysicalDeviceRayTracingFeaturesKHR & setRayTracingShaderGroupHandleCaptureReplayMixed( VULKAN_HPP_NAMESPACE::Bool32 rayTracingShaderGroupHandleCaptureReplayMixed_ ) VULKAN_HPP_NOEXCEPT
+ {
+ rayTracingShaderGroupHandleCaptureReplayMixed = rayTracingShaderGroupHandleCaptureReplayMixed_;
+ return *this;
+ }
+
+ PhysicalDeviceRayTracingFeaturesKHR & setRayTracingAccelerationStructureCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 rayTracingAccelerationStructureCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
+ {
+ rayTracingAccelerationStructureCaptureReplay = rayTracingAccelerationStructureCaptureReplay_;
+ return *this;
+ }
+
+ PhysicalDeviceRayTracingFeaturesKHR & setRayTracingIndirectTraceRays( VULKAN_HPP_NAMESPACE::Bool32 rayTracingIndirectTraceRays_ ) VULKAN_HPP_NOEXCEPT
+ {
+ rayTracingIndirectTraceRays = rayTracingIndirectTraceRays_;
+ return *this;
+ }
+
+ PhysicalDeviceRayTracingFeaturesKHR & setRayTracingIndirectAccelerationStructureBuild( VULKAN_HPP_NAMESPACE::Bool32 rayTracingIndirectAccelerationStructureBuild_ ) VULKAN_HPP_NOEXCEPT
+ {
+ rayTracingIndirectAccelerationStructureBuild = rayTracingIndirectAccelerationStructureBuild_;
+ return *this;
+ }
+
+ PhysicalDeviceRayTracingFeaturesKHR & setRayTracingHostAccelerationStructureCommands( VULKAN_HPP_NAMESPACE::Bool32 rayTracingHostAccelerationStructureCommands_ ) VULKAN_HPP_NOEXCEPT
+ {
+ rayTracingHostAccelerationStructureCommands = rayTracingHostAccelerationStructureCommands_;
+ return *this;
+ }
+
+ PhysicalDeviceRayTracingFeaturesKHR & setRayQuery( VULKAN_HPP_NAMESPACE::Bool32 rayQuery_ ) VULKAN_HPP_NOEXCEPT
+ {
+ rayQuery = rayQuery_;
+ return *this;
+ }
+
+ PhysicalDeviceRayTracingFeaturesKHR & setRayTracingPrimitiveCulling( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPrimitiveCulling_ ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV ) - offsetof( PhysicalDeviceRayTracingPropertiesNV, pNext ) );
+ rayTracingPrimitiveCulling = rayTracingPrimitiveCulling_;
return *this;
}
+
+ operator VkPhysicalDeviceRayTracingFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceRayTracingFeaturesKHR*>( this );
+ }
+
+ operator VkPhysicalDeviceRayTracingFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceRayTracingFeaturesKHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceRayTracingFeaturesKHR const& ) const = default;
+#else
+ bool operator==( PhysicalDeviceRayTracingFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( rayTracing == rhs.rayTracing )
+ && ( rayTracingShaderGroupHandleCaptureReplay == rhs.rayTracingShaderGroupHandleCaptureReplay )
+ && ( rayTracingShaderGroupHandleCaptureReplayMixed == rhs.rayTracingShaderGroupHandleCaptureReplayMixed )
+ && ( rayTracingAccelerationStructureCaptureReplay == rhs.rayTracingAccelerationStructureCaptureReplay )
+ && ( rayTracingIndirectTraceRays == rhs.rayTracingIndirectTraceRays )
+ && ( rayTracingIndirectAccelerationStructureBuild == rhs.rayTracingIndirectAccelerationStructureBuild )
+ && ( rayTracingHostAccelerationStructureCommands == rhs.rayTracingHostAccelerationStructureCommands )
+ && ( rayQuery == rhs.rayQuery )
+ && ( rayTracingPrimitiveCulling == rhs.rayTracingPrimitiveCulling );
+ }
+
+ bool operator!=( PhysicalDeviceRayTracingFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingFeaturesKHR;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 rayTracing = {};
+ VULKAN_HPP_NAMESPACE::Bool32 rayTracingShaderGroupHandleCaptureReplay = {};
+ VULKAN_HPP_NAMESPACE::Bool32 rayTracingShaderGroupHandleCaptureReplayMixed = {};
+ VULKAN_HPP_NAMESPACE::Bool32 rayTracingAccelerationStructureCaptureReplay = {};
+ VULKAN_HPP_NAMESPACE::Bool32 rayTracingIndirectTraceRays = {};
+ VULKAN_HPP_NAMESPACE::Bool32 rayTracingIndirectAccelerationStructureBuild = {};
+ VULKAN_HPP_NAMESPACE::Bool32 rayTracingHostAccelerationStructureCommands = {};
+ VULKAN_HPP_NAMESPACE::Bool32 rayQuery = {};
+ VULKAN_HPP_NAMESPACE::Bool32 rayTracingPrimitiveCulling = {};
+
+ };
+ static_assert( sizeof( PhysicalDeviceRayTracingFeaturesKHR ) == sizeof( VkPhysicalDeviceRayTracingFeaturesKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceRayTracingFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingFeaturesKHR>
+ {
+ using Type = PhysicalDeviceRayTracingFeaturesKHR;
+ };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ struct PhysicalDeviceRayTracingPropertiesKHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingPropertiesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPropertiesKHR(uint32_t shaderGroupHandleSize_ = {}, uint32_t maxRecursionDepth_ = {}, uint32_t maxShaderGroupStride_ = {}, uint32_t shaderGroupBaseAlignment_ = {}, uint64_t maxGeometryCount_ = {}, uint64_t maxInstanceCount_ = {}, uint64_t maxPrimitiveCount_ = {}, uint32_t maxDescriptorSetAccelerationStructures_ = {}, uint32_t shaderGroupHandleCaptureReplaySize_ = {}) VULKAN_HPP_NOEXCEPT
+ : shaderGroupHandleSize( shaderGroupHandleSize_ ), maxRecursionDepth( maxRecursionDepth_ ), maxShaderGroupStride( maxShaderGroupStride_ ), shaderGroupBaseAlignment( shaderGroupBaseAlignment_ ), maxGeometryCount( maxGeometryCount_ ), maxInstanceCount( maxInstanceCount_ ), maxPrimitiveCount( maxPrimitiveCount_ ), maxDescriptorSetAccelerationStructures( maxDescriptorSetAccelerationStructures_ ), shaderGroupHandleCaptureReplaySize( shaderGroupHandleCaptureReplaySize_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPropertiesKHR( PhysicalDeviceRayTracingPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceRayTracingPropertiesKHR( VkPhysicalDeviceRayTracingPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceRayTracingPropertiesKHR & operator=( VkPhysicalDeviceRayTracingPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesKHR const *>( &rhs );
+ return *this;
+ }
+
+ PhysicalDeviceRayTracingPropertiesKHR & operator=( PhysicalDeviceRayTracingPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceRayTracingPropertiesKHR ) );
+ return *this;
+ }
+
+
+ operator VkPhysicalDeviceRayTracingPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceRayTracingPropertiesKHR*>( this );
+ }
+
+ operator VkPhysicalDeviceRayTracingPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceRayTracingPropertiesKHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceRayTracingPropertiesKHR const& ) const = default;
+#else
+ bool operator==( PhysicalDeviceRayTracingPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( shaderGroupHandleSize == rhs.shaderGroupHandleSize )
+ && ( maxRecursionDepth == rhs.maxRecursionDepth )
+ && ( maxShaderGroupStride == rhs.maxShaderGroupStride )
+ && ( shaderGroupBaseAlignment == rhs.shaderGroupBaseAlignment )
+ && ( maxGeometryCount == rhs.maxGeometryCount )
+ && ( maxInstanceCount == rhs.maxInstanceCount )
+ && ( maxPrimitiveCount == rhs.maxPrimitiveCount )
+ && ( maxDescriptorSetAccelerationStructures == rhs.maxDescriptorSetAccelerationStructures )
+ && ( shaderGroupHandleCaptureReplaySize == rhs.shaderGroupHandleCaptureReplaySize );
+ }
+
+ bool operator!=( PhysicalDeviceRayTracingPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPropertiesKHR;
+ void* pNext = {};
+ uint32_t shaderGroupHandleSize = {};
+ uint32_t maxRecursionDepth = {};
+ uint32_t maxShaderGroupStride = {};
+ uint32_t shaderGroupBaseAlignment = {};
+ uint64_t maxGeometryCount = {};
+ uint64_t maxInstanceCount = {};
+ uint64_t maxPrimitiveCount = {};
+ uint32_t maxDescriptorSetAccelerationStructures = {};
+ uint32_t shaderGroupHandleCaptureReplaySize = {};
+
+ };
+ static_assert( sizeof( PhysicalDeviceRayTracingPropertiesKHR ) == sizeof( VkPhysicalDeviceRayTracingPropertiesKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceRayTracingPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingPropertiesKHR>
+ {
+ using Type = PhysicalDeviceRayTracingPropertiesKHR;
+ };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+ struct PhysicalDeviceRayTracingPropertiesNV
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingPropertiesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPropertiesNV(uint32_t shaderGroupHandleSize_ = {}, uint32_t maxRecursionDepth_ = {}, uint32_t maxShaderGroupStride_ = {}, uint32_t shaderGroupBaseAlignment_ = {}, uint64_t maxGeometryCount_ = {}, uint64_t maxInstanceCount_ = {}, uint64_t maxTriangleCount_ = {}, uint32_t maxDescriptorSetAccelerationStructures_ = {}) VULKAN_HPP_NOEXCEPT
+ : shaderGroupHandleSize( shaderGroupHandleSize_ ), maxRecursionDepth( maxRecursionDepth_ ), maxShaderGroupStride( maxShaderGroupStride_ ), shaderGroupBaseAlignment( shaderGroupBaseAlignment_ ), maxGeometryCount( maxGeometryCount_ ), maxInstanceCount( maxInstanceCount_ ), maxTriangleCount( maxTriangleCount_ ), maxDescriptorSetAccelerationStructures( maxDescriptorSetAccelerationStructures_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPropertiesNV( PhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
PhysicalDeviceRayTracingPropertiesNV( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceRayTracingPropertiesNV& operator=( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceRayTracingPropertiesNV & operator=( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV const *>( &rhs );
return *this;
}
+ PhysicalDeviceRayTracingPropertiesNV & operator=( PhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceRayTracingPropertiesNV ) );
+ return *this;
+ }
+
+
operator VkPhysicalDeviceRayTracingPropertiesNV const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceRayTracingPropertiesNV*>( this );
@@ -47313,6 +67554,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceRayTracingPropertiesNV*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceRayTracingPropertiesNV const& ) const = default;
+#else
bool operator==( PhysicalDeviceRayTracingPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -47331,6 +67576,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPropertiesNV;
@@ -47343,30 +67591,44 @@ namespace VULKAN_HPP_NAMESPACE
uint64_t maxInstanceCount = {};
uint64_t maxTriangleCount = {};
uint32_t maxDescriptorSetAccelerationStructures = {};
+
};
static_assert( sizeof( PhysicalDeviceRayTracingPropertiesNV ) == sizeof( VkPhysicalDeviceRayTracingPropertiesNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceRayTracingPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingPropertiesNV>
+ {
+ using Type = PhysicalDeviceRayTracingPropertiesNV;
+ };
+
struct PhysicalDeviceRepresentativeFragmentTestFeaturesNV
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceRepresentativeFragmentTestFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest_ = {} ) VULKAN_HPP_NOEXCEPT
- : representativeFragmentTest( representativeFragmentTest_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceRepresentativeFragmentTestFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest_ = {}) VULKAN_HPP_NOEXCEPT
+ : representativeFragmentTest( representativeFragmentTest_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV ) - offsetof( PhysicalDeviceRepresentativeFragmentTestFeaturesNV, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceRepresentativeFragmentTestFeaturesNV( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceRepresentativeFragmentTestFeaturesNV( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceRepresentativeFragmentTestFeaturesNV& operator=( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceRepresentativeFragmentTestFeaturesNV & operator=( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV const *>( &rhs );
+ return *this;
+ }
+
+ PhysicalDeviceRepresentativeFragmentTestFeaturesNV & operator=( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceRepresentativeFragmentTestFeaturesNV ) );
return *this;
}
@@ -47382,6 +67644,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV*>( this );
@@ -47392,6 +67655,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const& ) const = default;
+#else
bool operator==( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -47403,48 +67670,236 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest = {};
+
};
static_assert( sizeof( PhysicalDeviceRepresentativeFragmentTestFeaturesNV ) == sizeof( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceRepresentativeFragmentTestFeaturesNV>::value, "struct wrapper is not a standard layout!" );
- struct PhysicalDeviceSampleLocationsPropertiesEXT
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV>
{
- PhysicalDeviceSampleLocationsPropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlags sampleLocationSampleCounts_ = {},
- VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {},
- std::array<float,2> const& sampleLocationCoordinateRange_ = {},
- uint32_t sampleLocationSubPixelBits_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 variableSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT
- : sampleLocationSampleCounts( sampleLocationSampleCounts_ )
- , maxSampleLocationGridSize( maxSampleLocationGridSize_ )
- , sampleLocationCoordinateRange{}
- , sampleLocationSubPixelBits( sampleLocationSubPixelBits_ )
- , variableSampleLocations( variableSampleLocations_ )
+ using Type = PhysicalDeviceRepresentativeFragmentTestFeaturesNV;
+ };
+
+ struct PhysicalDeviceRobustness2FeaturesEXT
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRobustness2FeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2FeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2_ = {}, VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2_ = {}, VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor_ = {}) VULKAN_HPP_NOEXCEPT
+ : robustBufferAccess2( robustBufferAccess2_ ), robustImageAccess2( robustImageAccess2_ ), nullDescriptor( nullDescriptor_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2FeaturesEXT( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceRobustness2FeaturesEXT( VkPhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceRobustness2FeaturesEXT & operator=( VkPhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<float,2,2>::copy( sampleLocationCoordinateRange, sampleLocationCoordinateRange_ );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2FeaturesEXT const *>( &rhs );
+ return *this;
}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceRobustness2FeaturesEXT & operator=( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT ) - offsetof( PhysicalDeviceSampleLocationsPropertiesEXT, pNext ) );
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceRobustness2FeaturesEXT ) );
return *this;
}
+ PhysicalDeviceRobustness2FeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PhysicalDeviceRobustness2FeaturesEXT & setRobustBufferAccess2( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2_ ) VULKAN_HPP_NOEXCEPT
+ {
+ robustBufferAccess2 = robustBufferAccess2_;
+ return *this;
+ }
+
+ PhysicalDeviceRobustness2FeaturesEXT & setRobustImageAccess2( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2_ ) VULKAN_HPP_NOEXCEPT
+ {
+ robustImageAccess2 = robustImageAccess2_;
+ return *this;
+ }
+
+ PhysicalDeviceRobustness2FeaturesEXT & setNullDescriptor( VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor_ ) VULKAN_HPP_NOEXCEPT
+ {
+ nullDescriptor = nullDescriptor_;
+ return *this;
+ }
+
+
+ operator VkPhysicalDeviceRobustness2FeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceRobustness2FeaturesEXT*>( this );
+ }
+
+ operator VkPhysicalDeviceRobustness2FeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceRobustness2FeaturesEXT*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceRobustness2FeaturesEXT const& ) const = default;
+#else
+ bool operator==( PhysicalDeviceRobustness2FeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( robustBufferAccess2 == rhs.robustBufferAccess2 )
+ && ( robustImageAccess2 == rhs.robustImageAccess2 )
+ && ( nullDescriptor == rhs.nullDescriptor );
+ }
+
+ bool operator!=( PhysicalDeviceRobustness2FeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRobustness2FeaturesEXT;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor = {};
+
+ };
+ static_assert( sizeof( PhysicalDeviceRobustness2FeaturesEXT ) == sizeof( VkPhysicalDeviceRobustness2FeaturesEXT ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceRobustness2FeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceRobustness2FeaturesEXT>
+ {
+ using Type = PhysicalDeviceRobustness2FeaturesEXT;
+ };
+
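(Editor's illustrative sketch, not part of the generated header or this patch: one common way to consume the new PhysicalDeviceRobustness2FeaturesEXT wrapper is to chain it behind PhysicalDeviceFeatures2 when querying a device. The function name is hypothetical and it assumes a valid vk::PhysicalDevice from an instance created for Vulkan 1.1+ or with VK_KHR_get_physical_device_properties2.)

// Query whether VK_EXT_robustness2's nullDescriptor feature is supported.
#include <vulkan/vulkan.hpp>

bool supportsNullDescriptor( vk::PhysicalDevice physicalDevice )
{
  vk::PhysicalDeviceRobustness2FeaturesEXT robustness2Features;   // sType is preset by the wrapper
  vk::PhysicalDeviceFeatures2               features2;
  features2.pNext = &robustness2Features;                         // chain the extension struct
  physicalDevice.getFeatures2( &features2 );                      // fills both structs
  return robustness2Features.nullDescriptor == VK_TRUE;
}
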
+ struct PhysicalDeviceRobustness2PropertiesEXT
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRobustness2PropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2PropertiesEXT(VULKAN_HPP_NAMESPACE::DeviceSize robustStorageBufferAccessSizeAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize robustUniformBufferAccessSizeAlignment_ = {}) VULKAN_HPP_NOEXCEPT
+ : robustStorageBufferAccessSizeAlignment( robustStorageBufferAccessSizeAlignment_ ), robustUniformBufferAccessSizeAlignment( robustUniformBufferAccessSizeAlignment_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2PropertiesEXT( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceRobustness2PropertiesEXT( VkPhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceRobustness2PropertiesEXT & operator=( VkPhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2PropertiesEXT const *>( &rhs );
+ return *this;
+ }
+
+ PhysicalDeviceRobustness2PropertiesEXT & operator=( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceRobustness2PropertiesEXT ) );
+ return *this;
+ }
+
+
+ operator VkPhysicalDeviceRobustness2PropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceRobustness2PropertiesEXT*>( this );
+ }
+
+ operator VkPhysicalDeviceRobustness2PropertiesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceRobustness2PropertiesEXT*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceRobustness2PropertiesEXT const& ) const = default;
+#else
+ bool operator==( PhysicalDeviceRobustness2PropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( robustStorageBufferAccessSizeAlignment == rhs.robustStorageBufferAccessSizeAlignment )
+ && ( robustUniformBufferAccessSizeAlignment == rhs.robustUniformBufferAccessSizeAlignment );
+ }
+
+ bool operator!=( PhysicalDeviceRobustness2PropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRobustness2PropertiesEXT;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize robustStorageBufferAccessSizeAlignment = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize robustUniformBufferAccessSizeAlignment = {};
+
+ };
+ static_assert( sizeof( PhysicalDeviceRobustness2PropertiesEXT ) == sizeof( VkPhysicalDeviceRobustness2PropertiesEXT ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceRobustness2PropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceRobustness2PropertiesEXT>
+ {
+ using Type = PhysicalDeviceRobustness2PropertiesEXT;
+ };
+
+ struct PhysicalDeviceSampleLocationsPropertiesEXT
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSampleLocationsPropertiesEXT(VULKAN_HPP_NAMESPACE::SampleCountFlags sampleLocationSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {}, std::array<float,2> const& sampleLocationCoordinateRange_ = {}, uint32_t sampleLocationSubPixelBits_ = {}, VULKAN_HPP_NAMESPACE::Bool32 variableSampleLocations_ = {}) VULKAN_HPP_NOEXCEPT
+ : sampleLocationSampleCounts( sampleLocationSampleCounts_ ), maxSampleLocationGridSize( maxSampleLocationGridSize_ ), sampleLocationCoordinateRange( sampleLocationCoordinateRange_ ), sampleLocationSubPixelBits( sampleLocationSubPixelBits_ ), variableSampleLocations( variableSampleLocations_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSampleLocationsPropertiesEXT( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
PhysicalDeviceSampleLocationsPropertiesEXT( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceSampleLocationsPropertiesEXT& operator=( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceSampleLocationsPropertiesEXT & operator=( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT const *>( &rhs );
return *this;
}
+ PhysicalDeviceSampleLocationsPropertiesEXT & operator=( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceSampleLocationsPropertiesEXT ) );
+ return *this;
+ }
+
+
operator VkPhysicalDeviceSampleLocationsPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceSampleLocationsPropertiesEXT*>( this );
@@ -47455,13 +67910,17 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceSampleLocationsPropertiesEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceSampleLocationsPropertiesEXT const& ) const = default;
+#else
bool operator==( PhysicalDeviceSampleLocationsPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( sampleLocationSampleCounts == rhs.sampleLocationSampleCounts )
&& ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize )
- && ( memcmp( sampleLocationCoordinateRange, rhs.sampleLocationCoordinateRange, 2 * sizeof( float ) ) == 0 )
+ && ( sampleLocationCoordinateRange == rhs.sampleLocationCoordinateRange )
&& ( sampleLocationSubPixelBits == rhs.sampleLocationSubPixelBits )
&& ( variableSampleLocations == rhs.variableSampleLocations );
}
@@ -47470,44 +67929,60 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT;
void* pNext = {};
VULKAN_HPP_NAMESPACE::SampleCountFlags sampleLocationSampleCounts = {};
VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize = {};
- float sampleLocationCoordinateRange[2] = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> sampleLocationCoordinateRange = {};
uint32_t sampleLocationSubPixelBits = {};
VULKAN_HPP_NAMESPACE::Bool32 variableSampleLocations = {};
+
};
static_assert( sizeof( PhysicalDeviceSampleLocationsPropertiesEXT ) == sizeof( VkPhysicalDeviceSampleLocationsPropertiesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceSampleLocationsPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT>
+ {
+ using Type = PhysicalDeviceSampleLocationsPropertiesEXT;
+ };
+
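(Editor's illustrative sketch, not part of the generated header or this patch: the sampleLocationCoordinateRange member changes from a raw float[2] to an ArrayWrapper1D in this update, so it can be indexed and compared like a std::array instead of via memcmp. The helper below is hypothetical and assumes the same Vulkan 1.1+ / VK_KHR_get_physical_device_properties2 setup as above.)

// Read the lower bound of the sample-location coordinate range.
#include <vulkan/vulkan.hpp>

float minSampleLocationCoordinate( vk::PhysicalDevice physicalDevice )
{
  vk::PhysicalDeviceSampleLocationsPropertiesEXT sampleLocationProps;
  vk::PhysicalDeviceProperties2                  props2;
  props2.pNext = &sampleLocationProps;          // chain the extension struct
  physicalDevice.getProperties2( &props2 );     // fills both structs
  return sampleLocationProps.sampleLocationCoordinateRange[0];
}
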
struct PhysicalDeviceSamplerFilterMinmaxProperties
{
- PhysicalDeviceSamplerFilterMinmaxProperties( VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {} ) VULKAN_HPP_NOEXCEPT
- : filterMinmaxSingleComponentFormats( filterMinmaxSingleComponentFormats_ )
- , filterMinmaxImageComponentMapping( filterMinmaxImageComponentMapping_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerFilterMinmaxProperties(VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {}, VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {}) VULKAN_HPP_NOEXCEPT
+ : filterMinmaxSingleComponentFormats( filterMinmaxSingleComponentFormats_ ), filterMinmaxImageComponentMapping( filterMinmaxImageComponentMapping_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties ) - offsetof( PhysicalDeviceSamplerFilterMinmaxProperties, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerFilterMinmaxProperties( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceSamplerFilterMinmaxProperties( VkPhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceSamplerFilterMinmaxProperties& operator=( VkPhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceSamplerFilterMinmaxProperties & operator=( VkPhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties const *>( &rhs );
return *this;
}
+ PhysicalDeviceSamplerFilterMinmaxProperties & operator=( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceSamplerFilterMinmaxProperties ) );
+ return *this;
+ }
+
+
operator VkPhysicalDeviceSamplerFilterMinmaxProperties const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceSamplerFilterMinmaxProperties*>( this );
@@ -47518,6 +67993,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceSamplerFilterMinmaxProperties*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceSamplerFilterMinmaxProperties const& ) const = default;
+#else
bool operator==( PhysicalDeviceSamplerFilterMinmaxProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -47530,36 +68009,54 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats = {};
VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping = {};
+
};
static_assert( sizeof( PhysicalDeviceSamplerFilterMinmaxProperties ) == sizeof( VkPhysicalDeviceSamplerFilterMinmaxProperties ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceSamplerFilterMinmaxProperties>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties>
+ {
+ using Type = PhysicalDeviceSamplerFilterMinmaxProperties;
+ };
+ using PhysicalDeviceSamplerFilterMinmaxPropertiesEXT = PhysicalDeviceSamplerFilterMinmaxProperties;
+
struct PhysicalDeviceSamplerYcbcrConversionFeatures
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {} ) VULKAN_HPP_NOEXCEPT
- : samplerYcbcrConversion( samplerYcbcrConversion_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures(VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {}) VULKAN_HPP_NOEXCEPT
+ : samplerYcbcrConversion( samplerYcbcrConversion_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures ) - offsetof( PhysicalDeviceSamplerYcbcrConversionFeatures, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceSamplerYcbcrConversionFeatures( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceSamplerYcbcrConversionFeatures& operator=( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceSamplerYcbcrConversionFeatures & operator=( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures const *>( &rhs );
+ return *this;
+ }
+
+ PhysicalDeviceSamplerYcbcrConversionFeatures & operator=( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceSamplerYcbcrConversionFeatures ) );
return *this;
}
@@ -47575,6 +68072,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceSamplerYcbcrConversionFeatures const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceSamplerYcbcrConversionFeatures*>( this );
@@ -47585,6 +68083,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceSamplerYcbcrConversionFeatures*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceSamplerYcbcrConversionFeatures const& ) const = default;
+#else
bool operator==( PhysicalDeviceSamplerYcbcrConversionFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -47596,35 +68098,53 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion = {};
+
};
static_assert( sizeof( PhysicalDeviceSamplerYcbcrConversionFeatures ) == sizeof( VkPhysicalDeviceSamplerYcbcrConversionFeatures ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceSamplerYcbcrConversionFeatures>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures>
+ {
+ using Type = PhysicalDeviceSamplerYcbcrConversionFeatures;
+ };
+ using PhysicalDeviceSamplerYcbcrConversionFeaturesKHR = PhysicalDeviceSamplerYcbcrConversionFeatures;
+
struct PhysicalDeviceScalarBlockLayoutFeatures
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeatures( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {} ) VULKAN_HPP_NOEXCEPT
- : scalarBlockLayout( scalarBlockLayout_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceScalarBlockLayoutFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeatures(VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {}) VULKAN_HPP_NOEXCEPT
+ : scalarBlockLayout( scalarBlockLayout_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures ) - offsetof( PhysicalDeviceScalarBlockLayoutFeatures, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeatures( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceScalarBlockLayoutFeatures( VkPhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceScalarBlockLayoutFeatures & operator=( VkPhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures const *>( &rhs );
+ return *this;
+ }
- PhysicalDeviceScalarBlockLayoutFeatures& operator=( VkPhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceScalarBlockLayoutFeatures & operator=( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceScalarBlockLayoutFeatures ) );
return *this;
}
@@ -47640,6 +68160,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceScalarBlockLayoutFeatures const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceScalarBlockLayoutFeatures*>( this );
@@ -47650,6 +68171,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceScalarBlockLayoutFeatures*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceScalarBlockLayoutFeatures const& ) const = default;
+#else
bool operator==( PhysicalDeviceScalarBlockLayoutFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -47661,35 +68186,53 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceScalarBlockLayoutFeatures;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout = {};
+
};
static_assert( sizeof( PhysicalDeviceScalarBlockLayoutFeatures ) == sizeof( VkPhysicalDeviceScalarBlockLayoutFeatures ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceScalarBlockLayoutFeatures>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceScalarBlockLayoutFeatures>
+ {
+ using Type = PhysicalDeviceScalarBlockLayoutFeatures;
+ };
+ using PhysicalDeviceScalarBlockLayoutFeaturesEXT = PhysicalDeviceScalarBlockLayoutFeatures;
+
struct PhysicalDeviceSeparateDepthStencilLayoutsFeatures
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceSeparateDepthStencilLayoutsFeatures( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ = {} ) VULKAN_HPP_NOEXCEPT
- : separateDepthStencilLayouts( separateDepthStencilLayouts_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceSeparateDepthStencilLayoutsFeatures(VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ = {}) VULKAN_HPP_NOEXCEPT
+ : separateDepthStencilLayouts( separateDepthStencilLayouts_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures ) - offsetof( PhysicalDeviceSeparateDepthStencilLayoutsFeatures, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceSeparateDepthStencilLayoutsFeatures( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceSeparateDepthStencilLayoutsFeatures( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceSeparateDepthStencilLayoutsFeatures& operator=( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceSeparateDepthStencilLayoutsFeatures & operator=( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures const *>( &rhs );
+ return *this;
+ }
+
+ PhysicalDeviceSeparateDepthStencilLayoutsFeatures & operator=( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceSeparateDepthStencilLayoutsFeatures ) );
return *this;
}
@@ -47705,6 +68248,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures*>( this );
@@ -47715,6 +68259,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const& ) const = default;
+#else
bool operator==( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -47726,37 +68274,228 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts = {};
+
};
static_assert( sizeof( PhysicalDeviceSeparateDepthStencilLayoutsFeatures ) == sizeof( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceSeparateDepthStencilLayoutsFeatures>::value, "struct wrapper is not a standard layout!" );
- struct PhysicalDeviceShaderAtomicInt64Features
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures>
+ {
+ using Type = PhysicalDeviceSeparateDepthStencilLayoutsFeatures;
+ };
+ using PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR = PhysicalDeviceSeparateDepthStencilLayoutsFeatures;
+
+ struct PhysicalDeviceShaderAtomicFloatFeaturesEXT
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64Features( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {} ) VULKAN_HPP_NOEXCEPT
- : shaderBufferInt64Atomics( shaderBufferInt64Atomics_ )
- , shaderSharedInt64Atomics( shaderSharedInt64Atomics_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloatFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd_ = {}) VULKAN_HPP_NOEXCEPT
+ : shaderBufferFloat32Atomics( shaderBufferFloat32Atomics_ ), shaderBufferFloat32AtomicAdd( shaderBufferFloat32AtomicAdd_ ), shaderBufferFloat64Atomics( shaderBufferFloat64Atomics_ ), shaderBufferFloat64AtomicAdd( shaderBufferFloat64AtomicAdd_ ), shaderSharedFloat32Atomics( shaderSharedFloat32Atomics_ ), shaderSharedFloat32AtomicAdd( shaderSharedFloat32AtomicAdd_ ), shaderSharedFloat64Atomics( shaderSharedFloat64Atomics_ ), shaderSharedFloat64AtomicAdd( shaderSharedFloat64AtomicAdd_ ), shaderImageFloat32Atomics( shaderImageFloat32Atomics_ ), shaderImageFloat32AtomicAdd( shaderImageFloat32AtomicAdd_ ), sparseImageFloat32Atomics( sparseImageFloat32Atomics_ ), sparseImageFloat32AtomicAdd( sparseImageFloat32AtomicAdd_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloatFeaturesEXT( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceShaderAtomicFloatFeaturesEXT( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceShaderAtomicFloatFeaturesEXT & operator=( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloatFeaturesEXT const *>( &rhs );
+ return *this;
+ }
+
+ PhysicalDeviceShaderAtomicFloatFeaturesEXT & operator=( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceShaderAtomicFloatFeaturesEXT ) );
+ return *this;
+ }
+
+ PhysicalDeviceShaderAtomicFloatFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderBufferFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features ) - offsetof( PhysicalDeviceShaderAtomicInt64Features, pNext ) );
+ shaderBufferFloat32Atomics = shaderBufferFloat32Atomics_;
return *this;
}
+ PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderBufferFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderBufferFloat32AtomicAdd = shaderBufferFloat32AtomicAdd_;
+ return *this;
+ }
+
+ PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderBufferFloat64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderBufferFloat64Atomics = shaderBufferFloat64Atomics_;
+ return *this;
+ }
+
+ PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderBufferFloat64AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderBufferFloat64AtomicAdd = shaderBufferFloat64AtomicAdd_;
+ return *this;
+ }
+
+ PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderSharedFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderSharedFloat32Atomics = shaderSharedFloat32Atomics_;
+ return *this;
+ }
+
+ PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderSharedFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderSharedFloat32AtomicAdd = shaderSharedFloat32AtomicAdd_;
+ return *this;
+ }
+
+ PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderSharedFloat64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderSharedFloat64Atomics = shaderSharedFloat64Atomics_;
+ return *this;
+ }
+
+ PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderSharedFloat64AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderSharedFloat64AtomicAdd = shaderSharedFloat64AtomicAdd_;
+ return *this;
+ }
+
+ PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderImageFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderImageFloat32Atomics = shaderImageFloat32Atomics_;
+ return *this;
+ }
+
+ PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderImageFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderImageFloat32AtomicAdd = shaderImageFloat32AtomicAdd_;
+ return *this;
+ }
+
+ PhysicalDeviceShaderAtomicFloatFeaturesEXT & setSparseImageFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT
+ {
+ sparseImageFloat32Atomics = sparseImageFloat32Atomics_;
+ return *this;
+ }
+
+ PhysicalDeviceShaderAtomicFloatFeaturesEXT & setSparseImageFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
+ {
+ sparseImageFloat32AtomicAdd = sparseImageFloat32AtomicAdd_;
+ return *this;
+ }
+
+
+ operator VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceShaderAtomicFloatFeaturesEXT*>( this );
+ }
+
+ operator VkPhysicalDeviceShaderAtomicFloatFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceShaderAtomicFloatFeaturesEXT*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceShaderAtomicFloatFeaturesEXT const& ) const = default;
+#else
+ bool operator==( PhysicalDeviceShaderAtomicFloatFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( shaderBufferFloat32Atomics == rhs.shaderBufferFloat32Atomics )
+ && ( shaderBufferFloat32AtomicAdd == rhs.shaderBufferFloat32AtomicAdd )
+ && ( shaderBufferFloat64Atomics == rhs.shaderBufferFloat64Atomics )
+ && ( shaderBufferFloat64AtomicAdd == rhs.shaderBufferFloat64AtomicAdd )
+ && ( shaderSharedFloat32Atomics == rhs.shaderSharedFloat32Atomics )
+ && ( shaderSharedFloat32AtomicAdd == rhs.shaderSharedFloat32AtomicAdd )
+ && ( shaderSharedFloat64Atomics == rhs.shaderSharedFloat64Atomics )
+ && ( shaderSharedFloat64AtomicAdd == rhs.shaderSharedFloat64AtomicAdd )
+ && ( shaderImageFloat32Atomics == rhs.shaderImageFloat32Atomics )
+ && ( shaderImageFloat32AtomicAdd == rhs.shaderImageFloat32AtomicAdd )
+ && ( sparseImageFloat32Atomics == rhs.sparseImageFloat32Atomics )
+ && ( sparseImageFloat32AtomicAdd == rhs.sparseImageFloat32AtomicAdd );
+ }
+
+ bool operator!=( PhysicalDeviceShaderAtomicFloatFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd = {};
+ VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics = {};
+ VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd = {};
+
+ };
+ static_assert( sizeof( PhysicalDeviceShaderAtomicFloatFeaturesEXT ) == sizeof( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceShaderAtomicFloatFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT>
+ {
+ using Type = PhysicalDeviceShaderAtomicFloatFeaturesEXT;
+ };
+
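(Editor's illustrative sketch, not part of the generated header or this patch: the fluent setters generated above are typically used to build a feature struct that is chained into DeviceCreateInfo. The queue family index, extension list, and the assumption that the feature is actually supported are placeholders for illustration.)

// Create a device with buffer float32 atomic-add enabled (VK_EXT_shader_atomic_float).
#include <vulkan/vulkan.hpp>

vk::UniqueDevice createDeviceWithFloatAtomics( vk::PhysicalDevice physicalDevice )
{
  float queuePriority = 1.0f;
  vk::DeviceQueueCreateInfo queueInfo( {}, /*queueFamilyIndex*/ 0, 1, &queuePriority );

  const char * enabledExtensions[] = { "VK_EXT_shader_atomic_float" };

  vk::PhysicalDeviceShaderAtomicFloatFeaturesEXT atomicFloatFeatures;
  atomicFloatFeatures.setShaderBufferFloat32Atomics( VK_TRUE )
                     .setShaderBufferFloat32AtomicAdd( VK_TRUE );

  vk::DeviceCreateInfo deviceInfo( {}, 1, &queueInfo, 0, nullptr, 1, enabledExtensions );
  deviceInfo.setPNext( &atomicFloatFeatures );   // chain the feature struct into the create info

  return physicalDevice.createDeviceUnique( deviceInfo );
}
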
+ struct PhysicalDeviceShaderAtomicInt64Features
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderAtomicInt64Features;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64Features(VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {}) VULKAN_HPP_NOEXCEPT
+ : shaderBufferInt64Atomics( shaderBufferInt64Atomics_ ), shaderSharedInt64Atomics( shaderSharedInt64Atomics_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64Features( PhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
PhysicalDeviceShaderAtomicInt64Features( VkPhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceShaderAtomicInt64Features & operator=( VkPhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features const *>( &rhs );
+ return *this;
+ }
- PhysicalDeviceShaderAtomicInt64Features& operator=( VkPhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShaderAtomicInt64Features & operator=( PhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceShaderAtomicInt64Features ) );
return *this;
}
@@ -47778,6 +68517,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceShaderAtomicInt64Features const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderAtomicInt64Features*>( this );
@@ -47788,6 +68528,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceShaderAtomicInt64Features*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceShaderAtomicInt64Features const& ) const = default;
+#else
bool operator==( PhysicalDeviceShaderAtomicInt64Features const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -47800,38 +68544,54 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicInt64Features;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics = {};
+
};
static_assert( sizeof( PhysicalDeviceShaderAtomicInt64Features ) == sizeof( VkPhysicalDeviceShaderAtomicInt64Features ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceShaderAtomicInt64Features>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceShaderAtomicInt64Features>
+ {
+ using Type = PhysicalDeviceShaderAtomicInt64Features;
+ };
+ using PhysicalDeviceShaderAtomicInt64FeaturesKHR = PhysicalDeviceShaderAtomicInt64Features;
+
struct PhysicalDeviceShaderClockFeaturesKHR
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderClockFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock_ = {} ) VULKAN_HPP_NOEXCEPT
- : shaderSubgroupClock( shaderSubgroupClock_ )
- , shaderDeviceClock( shaderDeviceClock_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderClockFeaturesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderClockFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock_ = {}) VULKAN_HPP_NOEXCEPT
+ : shaderSubgroupClock( shaderSubgroupClock_ ), shaderDeviceClock( shaderDeviceClock_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR ) - offsetof( PhysicalDeviceShaderClockFeaturesKHR, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderClockFeaturesKHR( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderClockFeaturesKHR( VkPhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceShaderClockFeaturesKHR& operator=( VkPhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShaderClockFeaturesKHR & operator=( VkPhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR const *>( &rhs );
+ return *this;
+ }
+
+ PhysicalDeviceShaderClockFeaturesKHR & operator=( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceShaderClockFeaturesKHR ) );
return *this;
}
@@ -47853,6 +68613,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceShaderClockFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderClockFeaturesKHR*>( this );
@@ -47863,6 +68624,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceShaderClockFeaturesKHR*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceShaderClockFeaturesKHR const& ) const = default;
+#else
bool operator==( PhysicalDeviceShaderClockFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -47875,41 +68640,57 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderClockFeaturesKHR;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock = {};
+
};
static_assert( sizeof( PhysicalDeviceShaderClockFeaturesKHR ) == sizeof( VkPhysicalDeviceShaderClockFeaturesKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceShaderClockFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceShaderClockFeaturesKHR>
+ {
+ using Type = PhysicalDeviceShaderClockFeaturesKHR;
+ };
+
struct PhysicalDeviceShaderCoreProperties2AMD
{
- PhysicalDeviceShaderCoreProperties2AMD( VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures_ = {},
- uint32_t activeComputeUnitCount_ = {} ) VULKAN_HPP_NOEXCEPT
- : shaderCoreFeatures( shaderCoreFeatures_ )
- , activeComputeUnitCount( activeComputeUnitCount_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderCoreProperties2AMD;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreProperties2AMD(VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures_ = {}, uint32_t activeComputeUnitCount_ = {}) VULKAN_HPP_NOEXCEPT
+ : shaderCoreFeatures( shaderCoreFeatures_ ), activeComputeUnitCount( activeComputeUnitCount_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD ) - offsetof( PhysicalDeviceShaderCoreProperties2AMD, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreProperties2AMD( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderCoreProperties2AMD( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceShaderCoreProperties2AMD& operator=( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShaderCoreProperties2AMD & operator=( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD const *>( &rhs );
return *this;
}
+ PhysicalDeviceShaderCoreProperties2AMD & operator=( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceShaderCoreProperties2AMD ) );
+ return *this;
+ }
+
+
operator VkPhysicalDeviceShaderCoreProperties2AMD const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderCoreProperties2AMD*>( this );
@@ -47920,6 +68701,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceShaderCoreProperties2AMD*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceShaderCoreProperties2AMD const& ) const = default;
+#else
bool operator==( PhysicalDeviceShaderCoreProperties2AMD const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -47932,65 +68717,57 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCoreProperties2AMD;
void* pNext = {};
VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures = {};
uint32_t activeComputeUnitCount = {};
+
};
static_assert( sizeof( PhysicalDeviceShaderCoreProperties2AMD ) == sizeof( VkPhysicalDeviceShaderCoreProperties2AMD ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceShaderCoreProperties2AMD>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceShaderCoreProperties2AMD>
+ {
+ using Type = PhysicalDeviceShaderCoreProperties2AMD;
+ };
+
struct PhysicalDeviceShaderCorePropertiesAMD
{
- PhysicalDeviceShaderCorePropertiesAMD( uint32_t shaderEngineCount_ = {},
- uint32_t shaderArraysPerEngineCount_ = {},
- uint32_t computeUnitsPerShaderArray_ = {},
- uint32_t simdPerComputeUnit_ = {},
- uint32_t wavefrontsPerSimd_ = {},
- uint32_t wavefrontSize_ = {},
- uint32_t sgprsPerSimd_ = {},
- uint32_t minSgprAllocation_ = {},
- uint32_t maxSgprAllocation_ = {},
- uint32_t sgprAllocationGranularity_ = {},
- uint32_t vgprsPerSimd_ = {},
- uint32_t minVgprAllocation_ = {},
- uint32_t maxVgprAllocation_ = {},
- uint32_t vgprAllocationGranularity_ = {} ) VULKAN_HPP_NOEXCEPT
- : shaderEngineCount( shaderEngineCount_ )
- , shaderArraysPerEngineCount( shaderArraysPerEngineCount_ )
- , computeUnitsPerShaderArray( computeUnitsPerShaderArray_ )
- , simdPerComputeUnit( simdPerComputeUnit_ )
- , wavefrontsPerSimd( wavefrontsPerSimd_ )
- , wavefrontSize( wavefrontSize_ )
- , sgprsPerSimd( sgprsPerSimd_ )
- , minSgprAllocation( minSgprAllocation_ )
- , maxSgprAllocation( maxSgprAllocation_ )
- , sgprAllocationGranularity( sgprAllocationGranularity_ )
- , vgprsPerSimd( vgprsPerSimd_ )
- , minVgprAllocation( minVgprAllocation_ )
- , maxVgprAllocation( maxVgprAllocation_ )
- , vgprAllocationGranularity( vgprAllocationGranularity_ )
- {}
-
- VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD ) - offsetof( PhysicalDeviceShaderCorePropertiesAMD, pNext ) );
- return *this;
- }
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderCorePropertiesAMD;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCorePropertiesAMD(uint32_t shaderEngineCount_ = {}, uint32_t shaderArraysPerEngineCount_ = {}, uint32_t computeUnitsPerShaderArray_ = {}, uint32_t simdPerComputeUnit_ = {}, uint32_t wavefrontsPerSimd_ = {}, uint32_t wavefrontSize_ = {}, uint32_t sgprsPerSimd_ = {}, uint32_t minSgprAllocation_ = {}, uint32_t maxSgprAllocation_ = {}, uint32_t sgprAllocationGranularity_ = {}, uint32_t vgprsPerSimd_ = {}, uint32_t minVgprAllocation_ = {}, uint32_t maxVgprAllocation_ = {}, uint32_t vgprAllocationGranularity_ = {}) VULKAN_HPP_NOEXCEPT
+ : shaderEngineCount( shaderEngineCount_ ), shaderArraysPerEngineCount( shaderArraysPerEngineCount_ ), computeUnitsPerShaderArray( computeUnitsPerShaderArray_ ), simdPerComputeUnit( simdPerComputeUnit_ ), wavefrontsPerSimd( wavefrontsPerSimd_ ), wavefrontSize( wavefrontSize_ ), sgprsPerSimd( sgprsPerSimd_ ), minSgprAllocation( minSgprAllocation_ ), maxSgprAllocation( maxSgprAllocation_ ), sgprAllocationGranularity( sgprAllocationGranularity_ ), vgprsPerSimd( vgprsPerSimd_ ), minVgprAllocation( minVgprAllocation_ ), maxVgprAllocation( maxVgprAllocation_ ), vgprAllocationGranularity( vgprAllocationGranularity_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCorePropertiesAMD( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderCorePropertiesAMD( VkPhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceShaderCorePropertiesAMD& operator=( VkPhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShaderCorePropertiesAMD & operator=( VkPhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD const *>( &rhs );
return *this;
}
+ PhysicalDeviceShaderCorePropertiesAMD & operator=( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceShaderCorePropertiesAMD ) );
+ return *this;
+ }
+
+
operator VkPhysicalDeviceShaderCorePropertiesAMD const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderCorePropertiesAMD*>( this );
@@ -48001,6 +68778,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceShaderCorePropertiesAMD*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceShaderCorePropertiesAMD const& ) const = default;
+#else
bool operator==( PhysicalDeviceShaderCorePropertiesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -48025,6 +68806,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCorePropertiesAMD;
@@ -48043,30 +68827,44 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t minVgprAllocation = {};
uint32_t maxVgprAllocation = {};
uint32_t vgprAllocationGranularity = {};
+
};
static_assert( sizeof( PhysicalDeviceShaderCorePropertiesAMD ) == sizeof( VkPhysicalDeviceShaderCorePropertiesAMD ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceShaderCorePropertiesAMD>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceShaderCorePropertiesAMD>
+ {
+ using Type = PhysicalDeviceShaderCorePropertiesAMD;
+ };
+
struct PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ = {} ) VULKAN_HPP_NOEXCEPT
- : shaderDemoteToHelperInvocation( shaderDemoteToHelperInvocation_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ = {}) VULKAN_HPP_NOEXCEPT
+ : shaderDemoteToHelperInvocation( shaderDemoteToHelperInvocation_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT ) - offsetof( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT( VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT& operator=( VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT & operator=( VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const *>( &rhs );
+ return *this;
+ }
+
+ PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT & operator=( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT ) );
return *this;
}
@@ -48082,6 +68880,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT*>( this );
@@ -48092,6 +68891,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const& ) const = default;
+#else
bool operator==( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -48103,35 +68906,52 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation = {};
+
};
static_assert( sizeof( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT ) == sizeof( VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT>
+ {
+ using Type = PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT;
+ };
+
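These wrapper structs keep sType pre-initialized, so a feature struct such as PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT can be chained straight into DeviceCreateInfo::pNext at device creation. A minimal sketch, not part of the diff itself, assuming a physicalDevice handle and a queueCreateInfo prepared elsewhere (enabling the matching device extension is omitted here):

  // Request the demote-to-helper-invocation feature via the pNext chain.
  vk::PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT demoteFeatures;
  demoteFeatures.shaderDemoteToHelperInvocation = VK_TRUE;

  vk::DeviceCreateInfo deviceCreateInfo;
  deviceCreateInfo.queueCreateInfoCount = 1;
  deviceCreateInfo.pQueueCreateInfos    = &queueCreateInfo;  // assumed to exist
  deviceCreateInfo.pNext                = &demoteFeatures;   // sType is already set by the wrapper

  vk::Device device = physicalDevice.createDevice( deviceCreateInfo );
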
struct PhysicalDeviceShaderDrawParametersFeatures
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {} ) VULKAN_HPP_NOEXCEPT
- : shaderDrawParameters( shaderDrawParameters_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderDrawParametersFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures(VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {}) VULKAN_HPP_NOEXCEPT
+ : shaderDrawParameters( shaderDrawParameters_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures ) - offsetof( PhysicalDeviceShaderDrawParametersFeatures, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderDrawParametersFeatures( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceShaderDrawParametersFeatures& operator=( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShaderDrawParametersFeatures & operator=( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures const *>( &rhs );
+ return *this;
+ }
+
+ PhysicalDeviceShaderDrawParametersFeatures & operator=( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceShaderDrawParametersFeatures ) );
return *this;
}
@@ -48147,6 +68967,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceShaderDrawParametersFeatures const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderDrawParametersFeatures*>( this );
@@ -48157,6 +68978,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceShaderDrawParametersFeatures*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceShaderDrawParametersFeatures const& ) const = default;
+#else
bool operator==( PhysicalDeviceShaderDrawParametersFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -48168,37 +68993,53 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderDrawParametersFeatures;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters = {};
+
};
static_assert( sizeof( PhysicalDeviceShaderDrawParametersFeatures ) == sizeof( VkPhysicalDeviceShaderDrawParametersFeatures ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceShaderDrawParametersFeatures>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceShaderDrawParametersFeatures>
+ {
+ using Type = PhysicalDeviceShaderDrawParametersFeatures;
+ };
+ using PhysicalDeviceShaderDrawParameterFeatures = PhysicalDeviceShaderDrawParametersFeatures;
+
struct PhysicalDeviceShaderFloat16Int8Features
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat16Int8Features( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ = {} ) VULKAN_HPP_NOEXCEPT
- : shaderFloat16( shaderFloat16_ )
- , shaderInt8( shaderInt8_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderFloat16Int8Features;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat16Int8Features(VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ = {}) VULKAN_HPP_NOEXCEPT
+ : shaderFloat16( shaderFloat16_ ), shaderInt8( shaderInt8_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features ) - offsetof( PhysicalDeviceShaderFloat16Int8Features, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat16Int8Features( PhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderFloat16Int8Features( VkPhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceShaderFloat16Int8Features & operator=( VkPhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features const *>( &rhs );
+ return *this;
+ }
- PhysicalDeviceShaderFloat16Int8Features& operator=( VkPhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShaderFloat16Int8Features & operator=( PhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceShaderFloat16Int8Features ) );
return *this;
}
@@ -48220,6 +69061,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceShaderFloat16Int8Features const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderFloat16Int8Features*>( this );
@@ -48230,6 +69072,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceShaderFloat16Int8Features*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceShaderFloat16Int8Features const& ) const = default;
+#else
bool operator==( PhysicalDeviceShaderFloat16Int8Features const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -48242,36 +69088,55 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderFloat16Int8Features;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderInt8 = {};
+
};
static_assert( sizeof( PhysicalDeviceShaderFloat16Int8Features ) == sizeof( VkPhysicalDeviceShaderFloat16Int8Features ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceShaderFloat16Int8Features>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceShaderFloat16Int8Features>
+ {
+ using Type = PhysicalDeviceShaderFloat16Int8Features;
+ };
+ using PhysicalDeviceFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features;
+ using PhysicalDeviceShaderFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features;
+
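The structureType constant and the CppType specialization introduced for each struct are what let vulkan.hpp's StructureChain helpers assemble and validate pNext chains. A minimal query sketch, assuming a valid vk::PhysicalDevice on an implementation exposing Vulkan 1.2 (the KHR aliases above cover the pre-1.2 extension path):

  // Query float16/int8 shader arithmetic support through a StructureChain.
  auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
                                           vk::PhysicalDeviceShaderFloat16Int8Features>();
  const auto & f16i8 = chain.get<vk::PhysicalDeviceShaderFloat16Int8Features>();
  if ( f16i8.shaderFloat16 && f16i8.shaderInt8 )
  {
    // 16-bit float and 8-bit int types may be used in shader arithmetic
  }
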
struct PhysicalDeviceShaderImageFootprintFeaturesNV
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 imageFootprint_ = {} ) VULKAN_HPP_NOEXCEPT
- : imageFootprint( imageFootprint_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 imageFootprint_ = {}) VULKAN_HPP_NOEXCEPT
+ : imageFootprint( imageFootprint_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV ) - offsetof( PhysicalDeviceShaderImageFootprintFeaturesNV, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderImageFootprintFeaturesNV( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceShaderImageFootprintFeaturesNV & operator=( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV const *>( &rhs );
+ return *this;
+ }
- PhysicalDeviceShaderImageFootprintFeaturesNV& operator=( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShaderImageFootprintFeaturesNV & operator=( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceShaderImageFootprintFeaturesNV ) );
return *this;
}
@@ -48287,6 +69152,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceShaderImageFootprintFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderImageFootprintFeaturesNV*>( this );
@@ -48297,6 +69163,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceShaderImageFootprintFeaturesNV*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceShaderImageFootprintFeaturesNV const& ) const = default;
+#else
bool operator==( PhysicalDeviceShaderImageFootprintFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -48308,35 +69178,52 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 imageFootprint = {};
+
};
static_assert( sizeof( PhysicalDeviceShaderImageFootprintFeaturesNV ) == sizeof( VkPhysicalDeviceShaderImageFootprintFeaturesNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceShaderImageFootprintFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV>
+ {
+ using Type = PhysicalDeviceShaderImageFootprintFeaturesNV;
+ };
+
struct PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2_ = {} ) VULKAN_HPP_NOEXCEPT
- : shaderIntegerFunctions2( shaderIntegerFunctions2_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL(VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2_ = {}) VULKAN_HPP_NOEXCEPT
+ : shaderIntegerFunctions2( shaderIntegerFunctions2_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ) - offsetof( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL& operator=( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & operator=( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const *>( &rhs );
+ return *this;
+ }
+
+ PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & operator=( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ) );
return *this;
}
@@ -48352,6 +69239,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL*>( this );
@@ -48362,6 +69250,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const& ) const = default;
+#else
bool operator==( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -48373,35 +69265,52 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2 = {};
+
};
static_assert( sizeof( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ) == sizeof( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>
+ {
+ using Type = PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
+ };
+
struct PhysicalDeviceShaderSMBuiltinsFeaturesNV
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins_ = {} ) VULKAN_HPP_NOEXCEPT
- : shaderSMBuiltins( shaderSMBuiltins_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins_ = {}) VULKAN_HPP_NOEXCEPT
+ : shaderSMBuiltins( shaderSMBuiltins_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV ) - offsetof( PhysicalDeviceShaderSMBuiltinsFeaturesNV, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderSMBuiltinsFeaturesNV( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceShaderSMBuiltinsFeaturesNV& operator=( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShaderSMBuiltinsFeaturesNV & operator=( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV const *>( &rhs );
+ return *this;
+ }
+
+ PhysicalDeviceShaderSMBuiltinsFeaturesNV & operator=( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceShaderSMBuiltinsFeaturesNV ) );
return *this;
}
@@ -48417,6 +69326,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderSMBuiltinsFeaturesNV*>( this );
@@ -48427,6 +69337,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceShaderSMBuiltinsFeaturesNV*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceShaderSMBuiltinsFeaturesNV const& ) const = default;
+#else
bool operator==( PhysicalDeviceShaderSMBuiltinsFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -48438,40 +69352,56 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins = {};
+
};
static_assert( sizeof( PhysicalDeviceShaderSMBuiltinsFeaturesNV ) == sizeof( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceShaderSMBuiltinsFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV>
+ {
+ using Type = PhysicalDeviceShaderSMBuiltinsFeaturesNV;
+ };
+
struct PhysicalDeviceShaderSMBuiltinsPropertiesNV
{
- PhysicalDeviceShaderSMBuiltinsPropertiesNV( uint32_t shaderSMCount_ = {},
- uint32_t shaderWarpsPerSM_ = {} ) VULKAN_HPP_NOEXCEPT
- : shaderSMCount( shaderSMCount_ )
- , shaderWarpsPerSM( shaderWarpsPerSM_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsPropertiesNV(uint32_t shaderSMCount_ = {}, uint32_t shaderWarpsPerSM_ = {}) VULKAN_HPP_NOEXCEPT
+ : shaderSMCount( shaderSMCount_ ), shaderWarpsPerSM( shaderWarpsPerSM_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV ) - offsetof( PhysicalDeviceShaderSMBuiltinsPropertiesNV, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsPropertiesNV( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderSMBuiltinsPropertiesNV( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceShaderSMBuiltinsPropertiesNV & operator=( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV const *>( &rhs );
+ return *this;
+ }
- PhysicalDeviceShaderSMBuiltinsPropertiesNV& operator=( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShaderSMBuiltinsPropertiesNV & operator=( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceShaderSMBuiltinsPropertiesNV ) );
return *this;
}
+
operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderSMBuiltinsPropertiesNV*>( this );
@@ -48482,6 +69412,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceShaderSMBuiltinsPropertiesNV*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceShaderSMBuiltinsPropertiesNV const& ) const = default;
+#else
bool operator==( PhysicalDeviceShaderSMBuiltinsPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -48494,36 +69428,53 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV;
void* pNext = {};
uint32_t shaderSMCount = {};
uint32_t shaderWarpsPerSM = {};
+
};
static_assert( sizeof( PhysicalDeviceShaderSMBuiltinsPropertiesNV ) == sizeof( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceShaderSMBuiltinsPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV>
+ {
+ using Type = PhysicalDeviceShaderSMBuiltinsPropertiesNV;
+ };
+
struct PhysicalDeviceShaderSubgroupExtendedTypesFeatures
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupExtendedTypesFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ = {} ) VULKAN_HPP_NOEXCEPT
- : shaderSubgroupExtendedTypes( shaderSubgroupExtendedTypes_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupExtendedTypesFeatures(VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ = {}) VULKAN_HPP_NOEXCEPT
+ : shaderSubgroupExtendedTypes( shaderSubgroupExtendedTypes_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures ) - offsetof( PhysicalDeviceShaderSubgroupExtendedTypesFeatures, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupExtendedTypesFeatures( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderSubgroupExtendedTypesFeatures( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceShaderSubgroupExtendedTypesFeatures & operator=( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures const *>( &rhs );
+ return *this;
+ }
- PhysicalDeviceShaderSubgroupExtendedTypesFeatures& operator=( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShaderSubgroupExtendedTypesFeatures & operator=( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceShaderSubgroupExtendedTypesFeatures ) );
return *this;
}
@@ -48539,6 +69490,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures*>( this );
@@ -48549,6 +69501,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const& ) const = default;
+#else
bool operator==( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -48560,37 +69516,53 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes = {};
+
};
static_assert( sizeof( PhysicalDeviceShaderSubgroupExtendedTypesFeatures ) == sizeof( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceShaderSubgroupExtendedTypesFeatures>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures>
+ {
+ using Type = PhysicalDeviceShaderSubgroupExtendedTypesFeatures;
+ };
+ using PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = PhysicalDeviceShaderSubgroupExtendedTypesFeatures;
+
struct PhysicalDeviceShadingRateImageFeaturesNV
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder_ = {} ) VULKAN_HPP_NOEXCEPT
- : shadingRateImage( shadingRateImage_ )
- , shadingRateCoarseSampleOrder( shadingRateCoarseSampleOrder_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShadingRateImageFeaturesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder_ = {}) VULKAN_HPP_NOEXCEPT
+ : shadingRateImage( shadingRateImage_ ), shadingRateCoarseSampleOrder( shadingRateCoarseSampleOrder_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV ) - offsetof( PhysicalDeviceShadingRateImageFeaturesNV, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShadingRateImageFeaturesNV( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceShadingRateImageFeaturesNV & operator=( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV const *>( &rhs );
+ return *this;
+ }
- PhysicalDeviceShadingRateImageFeaturesNV& operator=( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShadingRateImageFeaturesNV & operator=( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceShadingRateImageFeaturesNV ) );
return *this;
}
@@ -48612,6 +69584,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceShadingRateImageFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShadingRateImageFeaturesNV*>( this );
@@ -48622,6 +69595,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceShadingRateImageFeaturesNV*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceShadingRateImageFeaturesNV const& ) const = default;
+#else
bool operator==( PhysicalDeviceShadingRateImageFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -48634,43 +69611,57 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShadingRateImageFeaturesNV;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage = {};
VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder = {};
+
};
static_assert( sizeof( PhysicalDeviceShadingRateImageFeaturesNV ) == sizeof( VkPhysicalDeviceShadingRateImageFeaturesNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceShadingRateImageFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceShadingRateImageFeaturesNV>
+ {
+ using Type = PhysicalDeviceShadingRateImageFeaturesNV;
+ };
+
struct PhysicalDeviceShadingRateImagePropertiesNV
{
- PhysicalDeviceShadingRateImagePropertiesNV( VULKAN_HPP_NAMESPACE::Extent2D shadingRateTexelSize_ = {},
- uint32_t shadingRatePaletteSize_ = {},
- uint32_t shadingRateMaxCoarseSamples_ = {} ) VULKAN_HPP_NOEXCEPT
- : shadingRateTexelSize( shadingRateTexelSize_ )
- , shadingRatePaletteSize( shadingRatePaletteSize_ )
- , shadingRateMaxCoarseSamples( shadingRateMaxCoarseSamples_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShadingRateImagePropertiesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImagePropertiesNV(VULKAN_HPP_NAMESPACE::Extent2D shadingRateTexelSize_ = {}, uint32_t shadingRatePaletteSize_ = {}, uint32_t shadingRateMaxCoarseSamples_ = {}) VULKAN_HPP_NOEXCEPT
+ : shadingRateTexelSize( shadingRateTexelSize_ ), shadingRatePaletteSize( shadingRatePaletteSize_ ), shadingRateMaxCoarseSamples( shadingRateMaxCoarseSamples_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV ) - offsetof( PhysicalDeviceShadingRateImagePropertiesNV, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImagePropertiesNV( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShadingRateImagePropertiesNV( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceShadingRateImagePropertiesNV & operator=( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV const *>( &rhs );
+ return *this;
+ }
- PhysicalDeviceShadingRateImagePropertiesNV& operator=( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShadingRateImagePropertiesNV & operator=( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceShadingRateImagePropertiesNV ) );
return *this;
}
+
operator VkPhysicalDeviceShadingRateImagePropertiesNV const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShadingRateImagePropertiesNV*>( this );
@@ -48681,6 +69672,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceShadingRateImagePropertiesNV*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceShadingRateImagePropertiesNV const& ) const = default;
+#else
bool operator==( PhysicalDeviceShadingRateImagePropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -48694,6 +69689,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShadingRateImagePropertiesNV;
@@ -48701,144 +69699,48 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Extent2D shadingRateTexelSize = {};
uint32_t shadingRatePaletteSize = {};
uint32_t shadingRateMaxCoarseSamples = {};
+
};
static_assert( sizeof( PhysicalDeviceShadingRateImagePropertiesNV ) == sizeof( VkPhysicalDeviceShadingRateImagePropertiesNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceShadingRateImagePropertiesNV>::value, "struct wrapper is not a standard layout!" );
- struct PhysicalDeviceSparseImageFormatInfo2
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceShadingRateImagePropertiesNV>
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseImageFormatInfo2( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
- VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D,
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
- VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {},
- VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal ) VULKAN_HPP_NOEXCEPT
- : format( format_ )
- , type( type_ )
- , samples( samples_ )
- , usage( usage_ )
- , tiling( tiling_ )
- {}
-
- VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 ) - offsetof( PhysicalDeviceSparseImageFormatInfo2, pNext ) );
- return *this;
- }
-
- PhysicalDeviceSparseImageFormatInfo2( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
-
- PhysicalDeviceSparseImageFormatInfo2& operator=( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 const *>(&rhs);
- return *this;
- }
-
- PhysicalDeviceSparseImageFormatInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
- PhysicalDeviceSparseImageFormatInfo2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
- {
- format = format_;
- return *this;
- }
-
- PhysicalDeviceSparseImageFormatInfo2 & setType( VULKAN_HPP_NAMESPACE::ImageType type_ ) VULKAN_HPP_NOEXCEPT
- {
- type = type_;
- return *this;
- }
-
- PhysicalDeviceSparseImageFormatInfo2 & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
- {
- samples = samples_;
- return *this;
- }
-
- PhysicalDeviceSparseImageFormatInfo2 & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
- {
- usage = usage_;
- return *this;
- }
-
- PhysicalDeviceSparseImageFormatInfo2 & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT
- {
- tiling = tiling_;
- return *this;
- }
-
- operator VkPhysicalDeviceSparseImageFormatInfo2 const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( this );
- }
-
- operator VkPhysicalDeviceSparseImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkPhysicalDeviceSparseImageFormatInfo2*>( this );
- }
-
- bool operator==( PhysicalDeviceSparseImageFormatInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( format == rhs.format )
- && ( type == rhs.type )
- && ( samples == rhs.samples )
- && ( usage == rhs.usage )
- && ( tiling == rhs.tiling );
- }
-
- bool operator!=( PhysicalDeviceSparseImageFormatInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-
- public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSparseImageFormatInfo2;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
- VULKAN_HPP_NAMESPACE::ImageType type = VULKAN_HPP_NAMESPACE::ImageType::e1D;
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
- VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
- VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal;
+ using Type = PhysicalDeviceShadingRateImagePropertiesNV;
};
- static_assert( sizeof( PhysicalDeviceSparseImageFormatInfo2 ) == sizeof( VkPhysicalDeviceSparseImageFormatInfo2 ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PhysicalDeviceSparseImageFormatInfo2>::value, "struct wrapper is not a standard layout!" );
struct PhysicalDeviceSubgroupProperties
{
- PhysicalDeviceSubgroupProperties( uint32_t subgroupSize_ = {},
- VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedStages_ = {},
- VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 quadOperationsInAllStages_ = {} ) VULKAN_HPP_NOEXCEPT
- : subgroupSize( subgroupSize_ )
- , supportedStages( supportedStages_ )
- , supportedOperations( supportedOperations_ )
- , quadOperationsInAllStages( quadOperationsInAllStages_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubgroupProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupProperties(uint32_t subgroupSize_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedStages_ = {}, VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations_ = {}, VULKAN_HPP_NAMESPACE::Bool32 quadOperationsInAllStages_ = {}) VULKAN_HPP_NOEXCEPT
+ : subgroupSize( subgroupSize_ ), supportedStages( supportedStages_ ), supportedOperations( supportedOperations_ ), quadOperationsInAllStages( quadOperationsInAllStages_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties ) - offsetof( PhysicalDeviceSubgroupProperties, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupProperties( PhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceSubgroupProperties( VkPhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceSubgroupProperties & operator=( VkPhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties const *>( &rhs );
+ return *this;
+ }
- PhysicalDeviceSubgroupProperties& operator=( VkPhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceSubgroupProperties & operator=( PhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceSubgroupProperties ) );
return *this;
}
+
operator VkPhysicalDeviceSubgroupProperties const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceSubgroupProperties*>( this );
@@ -48849,6 +69751,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceSubgroupProperties*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceSubgroupProperties const& ) const = default;
+#else
bool operator==( PhysicalDeviceSubgroupProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -48863,6 +69769,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupProperties;
@@ -48871,32 +69780,44 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedStages = {};
VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations = {};
VULKAN_HPP_NAMESPACE::Bool32 quadOperationsInAllStages = {};
+
};
static_assert( sizeof( PhysicalDeviceSubgroupProperties ) == sizeof( VkPhysicalDeviceSubgroupProperties ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceSubgroupProperties>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceSubgroupProperties>
+ {
+ using Type = PhysicalDeviceSubgroupProperties;
+ };
+
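Because the wrappers stay standard-layout (as the static_asserts above check), the plain manual pNext style also still works. A minimal sketch of reading subgroup properties this way, assuming a valid vk::PhysicalDevice with Vulkan 1.1 support:

  // Manual pNext chain: link the extended struct into PhysicalDeviceProperties2.
  vk::PhysicalDeviceSubgroupProperties subgroupProps;
  vk::PhysicalDeviceProperties2        props2;
  props2.pNext = &subgroupProps;

  physicalDevice.getProperties2( &props2 );

  uint32_t subgroupSize    = subgroupProps.subgroupSize;
  bool     quadInAllStages = subgroupProps.quadOperationsInAllStages == VK_TRUE;
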
struct PhysicalDeviceSubgroupSizeControlFeaturesEXT
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ = {} ) VULKAN_HPP_NOEXCEPT
- : subgroupSizeControl( subgroupSizeControl_ )
- , computeFullSubgroups( computeFullSubgroups_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubgroupSizeControlFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ = {}, VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ = {}) VULKAN_HPP_NOEXCEPT
+ : subgroupSizeControl( subgroupSizeControl_ ), computeFullSubgroups( computeFullSubgroups_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeaturesEXT ) - offsetof( PhysicalDeviceSubgroupSizeControlFeaturesEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeaturesEXT( PhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceSubgroupSizeControlFeaturesEXT( VkPhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceSubgroupSizeControlFeaturesEXT & operator=( VkPhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeaturesEXT const *>( &rhs );
+ return *this;
+ }
- PhysicalDeviceSubgroupSizeControlFeaturesEXT& operator=( VkPhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceSubgroupSizeControlFeaturesEXT & operator=( PhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeaturesEXT const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceSubgroupSizeControlFeaturesEXT ) );
return *this;
}
@@ -48918,6 +69839,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceSubgroupSizeControlFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceSubgroupSizeControlFeaturesEXT*>( this );
@@ -48928,6 +69850,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceSubgroupSizeControlFeaturesEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceSubgroupSizeControlFeaturesEXT const& ) const = default;
+#else
bool operator==( PhysicalDeviceSubgroupSizeControlFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -48940,45 +69866,57 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupSizeControlFeaturesEXT;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl = {};
VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups = {};
+
};
static_assert( sizeof( PhysicalDeviceSubgroupSizeControlFeaturesEXT ) == sizeof( VkPhysicalDeviceSubgroupSizeControlFeaturesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceSubgroupSizeControlFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceSubgroupSizeControlFeaturesEXT>
+ {
+ using Type = PhysicalDeviceSubgroupSizeControlFeaturesEXT;
+ };
+
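     A minimal usage sketch (assuming a valid vk::PhysicalDevice and that vkGetPhysicalDeviceFeatures2 is available via Vulkan 1.1 or VK_KHR_get_physical_device_properties2) of how a features wrapper such as the one above is chained into PhysicalDeviceFeatures2; the wrapper pre-initializes sType, so only pNext needs wiring:

     // Hypothetical helper, not part of vulkan.hpp or of this diff.
     #include <vulkan/vulkan.hpp>

     bool supportsSubgroupSizeControl( vk::PhysicalDevice physicalDevice )
     {
       vk::PhysicalDeviceSubgroupSizeControlFeaturesEXT subgroupSizeControl;  // sType set by the wrapper
       vk::PhysicalDeviceFeatures2                      features2;
       features2.pNext = &subgroupSizeControl;      // chain the extension struct
       physicalDevice.getFeatures2( &features2 );   // fills both structs
       return subgroupSizeControl.subgroupSizeControl == VK_TRUE;
     }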
struct PhysicalDeviceSubgroupSizeControlPropertiesEXT
{
- PhysicalDeviceSubgroupSizeControlPropertiesEXT( uint32_t minSubgroupSize_ = {},
- uint32_t maxSubgroupSize_ = {},
- uint32_t maxComputeWorkgroupSubgroups_ = {},
- VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages_ = {} ) VULKAN_HPP_NOEXCEPT
- : minSubgroupSize( minSubgroupSize_ )
- , maxSubgroupSize( maxSubgroupSize_ )
- , maxComputeWorkgroupSubgroups( maxComputeWorkgroupSubgroups_ )
- , requiredSubgroupSizeStages( requiredSubgroupSizeStages_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubgroupSizeControlPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlPropertiesEXT(uint32_t minSubgroupSize_ = {}, uint32_t maxSubgroupSize_ = {}, uint32_t maxComputeWorkgroupSubgroups_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages_ = {}) VULKAN_HPP_NOEXCEPT
+ : minSubgroupSize( minSubgroupSize_ ), maxSubgroupSize( maxSubgroupSize_ ), maxComputeWorkgroupSubgroups( maxComputeWorkgroupSubgroups_ ), requiredSubgroupSizeStages( requiredSubgroupSizeStages_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlPropertiesEXT ) - offsetof( PhysicalDeviceSubgroupSizeControlPropertiesEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlPropertiesEXT( PhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceSubgroupSizeControlPropertiesEXT( VkPhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceSubgroupSizeControlPropertiesEXT& operator=( VkPhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceSubgroupSizeControlPropertiesEXT & operator=( VkPhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlPropertiesEXT const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlPropertiesEXT const *>( &rhs );
return *this;
}
+ PhysicalDeviceSubgroupSizeControlPropertiesEXT & operator=( PhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceSubgroupSizeControlPropertiesEXT ) );
+ return *this;
+ }
+
+
operator VkPhysicalDeviceSubgroupSizeControlPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceSubgroupSizeControlPropertiesEXT*>( this );
@@ -48989,6 +69927,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceSubgroupSizeControlPropertiesEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceSubgroupSizeControlPropertiesEXT const& ) const = default;
+#else
bool operator==( PhysicalDeviceSubgroupSizeControlPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -49003,6 +69945,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupSizeControlPropertiesEXT;
@@ -49011,95 +69956,44 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t maxSubgroupSize = {};
uint32_t maxComputeWorkgroupSubgroups = {};
VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages = {};
+
};
static_assert( sizeof( PhysicalDeviceSubgroupSizeControlPropertiesEXT ) == sizeof( VkPhysicalDeviceSubgroupSizeControlPropertiesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceSubgroupSizeControlPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
- struct PhysicalDeviceSurfaceInfo2KHR
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceSubgroupSizeControlPropertiesEXT>
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceSurfaceInfo2KHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {} ) VULKAN_HPP_NOEXCEPT
- : surface( surface_ )
- {}
-
- VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR ) - offsetof( PhysicalDeviceSurfaceInfo2KHR, pNext ) );
- return *this;
- }
-
- PhysicalDeviceSurfaceInfo2KHR( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
-
- PhysicalDeviceSurfaceInfo2KHR& operator=( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR const *>(&rhs);
- return *this;
- }
-
- PhysicalDeviceSurfaceInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
- PhysicalDeviceSurfaceInfo2KHR & setSurface( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT
- {
- surface = surface_;
- return *this;
- }
-
- operator VkPhysicalDeviceSurfaceInfo2KHR const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( this );
- }
-
- operator VkPhysicalDeviceSurfaceInfo2KHR &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkPhysicalDeviceSurfaceInfo2KHR*>( this );
- }
-
- bool operator==( PhysicalDeviceSurfaceInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( surface == rhs.surface );
- }
-
- bool operator!=( PhysicalDeviceSurfaceInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-
- public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSurfaceInfo2KHR;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::SurfaceKHR surface = {};
+ using Type = PhysicalDeviceSubgroupSizeControlPropertiesEXT;
};
- static_assert( sizeof( PhysicalDeviceSurfaceInfo2KHR ) == sizeof( VkPhysicalDeviceSurfaceInfo2KHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PhysicalDeviceSurfaceInfo2KHR>::value, "struct wrapper is not a standard layout!" );
struct PhysicalDeviceTexelBufferAlignmentFeaturesEXT
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ = {} ) VULKAN_HPP_NOEXCEPT
- : texelBufferAlignment( texelBufferAlignment_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ = {}) VULKAN_HPP_NOEXCEPT
+ : texelBufferAlignment( texelBufferAlignment_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT ) - offsetof( PhysicalDeviceTexelBufferAlignmentFeaturesEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentFeaturesEXT( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceTexelBufferAlignmentFeaturesEXT( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceTexelBufferAlignmentFeaturesEXT & operator=( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT const *>( &rhs );
+ return *this;
+ }
- PhysicalDeviceTexelBufferAlignmentFeaturesEXT& operator=( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceTexelBufferAlignmentFeaturesEXT & operator=( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceTexelBufferAlignmentFeaturesEXT ) );
return *this;
}
@@ -49115,6 +70009,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT*>( this );
@@ -49125,6 +70020,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const& ) const = default;
+#else
bool operator==( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -49136,44 +70035,56 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment = {};
+
};
static_assert( sizeof( PhysicalDeviceTexelBufferAlignmentFeaturesEXT ) == sizeof( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceTexelBufferAlignmentFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT>
+ {
+ using Type = PhysicalDeviceTexelBufferAlignmentFeaturesEXT;
+ };
+
struct PhysicalDeviceTexelBufferAlignmentPropertiesEXT
{
- PhysicalDeviceTexelBufferAlignmentPropertiesEXT( VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment_ = {} ) VULKAN_HPP_NOEXCEPT
- : storageTexelBufferOffsetAlignmentBytes( storageTexelBufferOffsetAlignmentBytes_ )
- , storageTexelBufferOffsetSingleTexelAlignment( storageTexelBufferOffsetSingleTexelAlignment_ )
- , uniformTexelBufferOffsetAlignmentBytes( uniformTexelBufferOffsetAlignmentBytes_ )
- , uniformTexelBufferOffsetSingleTexelAlignment( uniformTexelBufferOffsetSingleTexelAlignment_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTexelBufferAlignmentPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentPropertiesEXT(VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment_ = {}) VULKAN_HPP_NOEXCEPT
+ : storageTexelBufferOffsetAlignmentBytes( storageTexelBufferOffsetAlignmentBytes_ ), storageTexelBufferOffsetSingleTexelAlignment( storageTexelBufferOffsetSingleTexelAlignment_ ), uniformTexelBufferOffsetAlignmentBytes( uniformTexelBufferOffsetAlignmentBytes_ ), uniformTexelBufferOffsetSingleTexelAlignment( uniformTexelBufferOffsetSingleTexelAlignment_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentPropertiesEXT ) - offsetof( PhysicalDeviceTexelBufferAlignmentPropertiesEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentPropertiesEXT( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceTexelBufferAlignmentPropertiesEXT( VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceTexelBufferAlignmentPropertiesEXT& operator=( VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceTexelBufferAlignmentPropertiesEXT & operator=( VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentPropertiesEXT const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentPropertiesEXT const *>( &rhs );
return *this;
}
+ PhysicalDeviceTexelBufferAlignmentPropertiesEXT & operator=( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceTexelBufferAlignmentPropertiesEXT ) );
+ return *this;
+ }
+
+
operator VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT*>( this );
@@ -49184,6 +70095,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const& ) const = default;
+#else
bool operator==( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -49198,6 +70113,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentPropertiesEXT;
@@ -49206,30 +70124,44 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment = {};
VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes = {};
VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment = {};
+
};
static_assert( sizeof( PhysicalDeviceTexelBufferAlignmentPropertiesEXT ) == sizeof( VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceTexelBufferAlignmentPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceTexelBufferAlignmentPropertiesEXT>
+ {
+ using Type = PhysicalDeviceTexelBufferAlignmentPropertiesEXT;
+ };
+
struct PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ = {} ) VULKAN_HPP_NOEXCEPT
- : textureCompressionASTC_HDR( textureCompressionASTC_HDR_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ = {}) VULKAN_HPP_NOEXCEPT
+ : textureCompressionASTC_HDR( textureCompressionASTC_HDR_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT ) - offsetof( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT( VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT& operator=( VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT & operator=( VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const *>( &rhs );
+ return *this;
+ }
+
+ PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT & operator=( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT ) );
return *this;
}
@@ -49245,6 +70177,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT*>( this );
@@ -49255,6 +70188,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const& ) const = default;
+#else
bool operator==( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -49266,35 +70203,52 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR = {};
+
};
static_assert( sizeof( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT ) == sizeof( VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT>
+ {
+ using Type = PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT;
+ };
+
struct PhysicalDeviceTimelineSemaphoreFeatures
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreFeatures( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ = {} ) VULKAN_HPP_NOEXCEPT
- : timelineSemaphore( timelineSemaphore_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTimelineSemaphoreFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreFeatures(VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ = {}) VULKAN_HPP_NOEXCEPT
+ : timelineSemaphore( timelineSemaphore_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures ) - offsetof( PhysicalDeviceTimelineSemaphoreFeatures, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreFeatures( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceTimelineSemaphoreFeatures( VkPhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceTimelineSemaphoreFeatures & operator=( VkPhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures const *>( &rhs );
+ return *this;
+ }
- PhysicalDeviceTimelineSemaphoreFeatures& operator=( VkPhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceTimelineSemaphoreFeatures & operator=( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceTimelineSemaphoreFeatures ) );
return *this;
}
@@ -49310,6 +70264,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceTimelineSemaphoreFeatures const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceTimelineSemaphoreFeatures*>( this );
@@ -49320,6 +70275,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceTimelineSemaphoreFeatures*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceTimelineSemaphoreFeatures const& ) const = default;
+#else
bool operator==( PhysicalDeviceTimelineSemaphoreFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -49331,38 +70290,57 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTimelineSemaphoreFeatures;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore = {};
+
};
static_assert( sizeof( PhysicalDeviceTimelineSemaphoreFeatures ) == sizeof( VkPhysicalDeviceTimelineSemaphoreFeatures ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceTimelineSemaphoreFeatures>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceTimelineSemaphoreFeatures>
+ {
+ using Type = PhysicalDeviceTimelineSemaphoreFeatures;
+ };
+ using PhysicalDeviceTimelineSemaphoreFeaturesKHR = PhysicalDeviceTimelineSemaphoreFeatures;
+
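     The CppType specialization added for each struct is what lets the StructureChain utilities map a StructureType value back to its wrapper type. A minimal sketch of the chained query path, assuming the same Vulkan 1.1-capable physical device as above:

     // Hypothetical helper, not part of vulkan.hpp or of this diff.
     #include <vulkan/vulkan.hpp>

     bool supportsTimelineSemaphores( vk::PhysicalDevice physicalDevice )
     {
       // getFeatures2<...>() returns a StructureChain with the pNext links pre-wired;
       // structureType / CppType keep the chain consistent with the StructureType enum.
       auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
                                                vk::PhysicalDeviceTimelineSemaphoreFeatures>();
       return chain.get<vk::PhysicalDeviceTimelineSemaphoreFeatures>().timelineSemaphore == VK_TRUE;
     }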
struct PhysicalDeviceTimelineSemaphoreProperties
{
- PhysicalDeviceTimelineSemaphoreProperties( uint64_t maxTimelineSemaphoreValueDifference_ = {} ) VULKAN_HPP_NOEXCEPT
- : maxTimelineSemaphoreValueDifference( maxTimelineSemaphoreValueDifference_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTimelineSemaphoreProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreProperties(uint64_t maxTimelineSemaphoreValueDifference_ = {}) VULKAN_HPP_NOEXCEPT
+ : maxTimelineSemaphoreValueDifference( maxTimelineSemaphoreValueDifference_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties ) - offsetof( PhysicalDeviceTimelineSemaphoreProperties, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreProperties( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceTimelineSemaphoreProperties( VkPhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceTimelineSemaphoreProperties& operator=( VkPhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceTimelineSemaphoreProperties & operator=( VkPhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties const *>( &rhs );
return *this;
}
+ PhysicalDeviceTimelineSemaphoreProperties & operator=( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceTimelineSemaphoreProperties ) );
+ return *this;
+ }
+
+
operator VkPhysicalDeviceTimelineSemaphoreProperties const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceTimelineSemaphoreProperties*>( this );
@@ -49373,6 +70351,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceTimelineSemaphoreProperties*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceTimelineSemaphoreProperties const& ) const = default;
+#else
bool operator==( PhysicalDeviceTimelineSemaphoreProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -49384,111 +70366,53 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTimelineSemaphoreProperties;
void* pNext = {};
uint64_t maxTimelineSemaphoreValueDifference = {};
+
};
static_assert( sizeof( PhysicalDeviceTimelineSemaphoreProperties ) == sizeof( VkPhysicalDeviceTimelineSemaphoreProperties ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceTimelineSemaphoreProperties>::value, "struct wrapper is not a standard layout!" );
- struct PhysicalDeviceToolPropertiesEXT
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceTimelineSemaphoreProperties>
{
- PhysicalDeviceToolPropertiesEXT( std::array<char,VK_MAX_EXTENSION_NAME_SIZE> const& name_ = {},
- std::array<char,VK_MAX_EXTENSION_NAME_SIZE> const& version_ = {},
- VULKAN_HPP_NAMESPACE::ToolPurposeFlagsEXT purposes_ = {},
- std::array<char,VK_MAX_DESCRIPTION_SIZE> const& description_ = {},
- std::array<char,VK_MAX_EXTENSION_NAME_SIZE> const& layer_ = {} ) VULKAN_HPP_NOEXCEPT
- : name{}
- , version{}
- , purposes( purposes_ )
- , description{}
- , layer{}
- {
- VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_EXTENSION_NAME_SIZE,VK_MAX_EXTENSION_NAME_SIZE>::copy( name, name_ );
- VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_EXTENSION_NAME_SIZE,VK_MAX_EXTENSION_NAME_SIZE>::copy( version, version_ );
- VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_DESCRIPTION_SIZE,VK_MAX_DESCRIPTION_SIZE>::copy( description, description_ );
- VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_EXTENSION_NAME_SIZE,VK_MAX_EXTENSION_NAME_SIZE>::copy( layer, layer_ );
- }
-
- VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT ) - offsetof( PhysicalDeviceToolPropertiesEXT, pNext ) );
- return *this;
- }
-
- PhysicalDeviceToolPropertiesEXT( VkPhysicalDeviceToolPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
-
- PhysicalDeviceToolPropertiesEXT& operator=( VkPhysicalDeviceToolPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT const *>(&rhs);
- return *this;
- }
-
- operator VkPhysicalDeviceToolPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkPhysicalDeviceToolPropertiesEXT*>( this );
- }
-
- operator VkPhysicalDeviceToolPropertiesEXT &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkPhysicalDeviceToolPropertiesEXT*>( this );
- }
-
- bool operator==( PhysicalDeviceToolPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( memcmp( name, rhs.name, VK_MAX_EXTENSION_NAME_SIZE * sizeof( char ) ) == 0 )
- && ( memcmp( version, rhs.version, VK_MAX_EXTENSION_NAME_SIZE * sizeof( char ) ) == 0 )
- && ( purposes == rhs.purposes )
- && ( memcmp( description, rhs.description, VK_MAX_DESCRIPTION_SIZE * sizeof( char ) ) == 0 )
- && ( memcmp( layer, rhs.layer, VK_MAX_EXTENSION_NAME_SIZE * sizeof( char ) ) == 0 );
- }
-
- bool operator!=( PhysicalDeviceToolPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-
- public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceToolPropertiesEXT;
- void* pNext = {};
- char name[VK_MAX_EXTENSION_NAME_SIZE] = {};
- char version[VK_MAX_EXTENSION_NAME_SIZE] = {};
- VULKAN_HPP_NAMESPACE::ToolPurposeFlagsEXT purposes = {};
- char description[VK_MAX_DESCRIPTION_SIZE] = {};
- char layer[VK_MAX_EXTENSION_NAME_SIZE] = {};
+ using Type = PhysicalDeviceTimelineSemaphoreProperties;
};
- static_assert( sizeof( PhysicalDeviceToolPropertiesEXT ) == sizeof( VkPhysicalDeviceToolPropertiesEXT ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PhysicalDeviceToolPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+ using PhysicalDeviceTimelineSemaphorePropertiesKHR = PhysicalDeviceTimelineSemaphoreProperties;
struct PhysicalDeviceTransformFeedbackFeaturesEXT
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 transformFeedback_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 geometryStreams_ = {} ) VULKAN_HPP_NOEXCEPT
- : transformFeedback( transformFeedback_ )
- , geometryStreams( geometryStreams_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 transformFeedback_ = {}, VULKAN_HPP_NAMESPACE::Bool32 geometryStreams_ = {}) VULKAN_HPP_NOEXCEPT
+ : transformFeedback( transformFeedback_ ), geometryStreams( geometryStreams_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT ) - offsetof( PhysicalDeviceTransformFeedbackFeaturesEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackFeaturesEXT( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceTransformFeedbackFeaturesEXT( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceTransformFeedbackFeaturesEXT& operator=( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceTransformFeedbackFeaturesEXT & operator=( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT const *>( &rhs );
+ return *this;
+ }
+
+ PhysicalDeviceTransformFeedbackFeaturesEXT & operator=( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceTransformFeedbackFeaturesEXT ) );
return *this;
}
@@ -49510,6 +70434,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceTransformFeedbackFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceTransformFeedbackFeaturesEXT*>( this );
@@ -49520,6 +70445,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceTransformFeedbackFeaturesEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceTransformFeedbackFeaturesEXT const& ) const = default;
+#else
bool operator==( PhysicalDeviceTransformFeedbackFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -49532,57 +70461,57 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 transformFeedback = {};
VULKAN_HPP_NAMESPACE::Bool32 geometryStreams = {};
+
};
static_assert( sizeof( PhysicalDeviceTransformFeedbackFeaturesEXT ) == sizeof( VkPhysicalDeviceTransformFeedbackFeaturesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceTransformFeedbackFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT>
+ {
+ using Type = PhysicalDeviceTransformFeedbackFeaturesEXT;
+ };
+
struct PhysicalDeviceTransformFeedbackPropertiesEXT
{
- PhysicalDeviceTransformFeedbackPropertiesEXT( uint32_t maxTransformFeedbackStreams_ = {},
- uint32_t maxTransformFeedbackBuffers_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize maxTransformFeedbackBufferSize_ = {},
- uint32_t maxTransformFeedbackStreamDataSize_ = {},
- uint32_t maxTransformFeedbackBufferDataSize_ = {},
- uint32_t maxTransformFeedbackBufferDataStride_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackQueries_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackStreamsLinesTriangles_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackRasterizationStreamSelect_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackDraw_ = {} ) VULKAN_HPP_NOEXCEPT
- : maxTransformFeedbackStreams( maxTransformFeedbackStreams_ )
- , maxTransformFeedbackBuffers( maxTransformFeedbackBuffers_ )
- , maxTransformFeedbackBufferSize( maxTransformFeedbackBufferSize_ )
- , maxTransformFeedbackStreamDataSize( maxTransformFeedbackStreamDataSize_ )
- , maxTransformFeedbackBufferDataSize( maxTransformFeedbackBufferDataSize_ )
- , maxTransformFeedbackBufferDataStride( maxTransformFeedbackBufferDataStride_ )
- , transformFeedbackQueries( transformFeedbackQueries_ )
- , transformFeedbackStreamsLinesTriangles( transformFeedbackStreamsLinesTriangles_ )
- , transformFeedbackRasterizationStreamSelect( transformFeedbackRasterizationStreamSelect_ )
- , transformFeedbackDraw( transformFeedbackDraw_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackPropertiesEXT(uint32_t maxTransformFeedbackStreams_ = {}, uint32_t maxTransformFeedbackBuffers_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize maxTransformFeedbackBufferSize_ = {}, uint32_t maxTransformFeedbackStreamDataSize_ = {}, uint32_t maxTransformFeedbackBufferDataSize_ = {}, uint32_t maxTransformFeedbackBufferDataStride_ = {}, VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackQueries_ = {}, VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackStreamsLinesTriangles_ = {}, VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackRasterizationStreamSelect_ = {}, VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackDraw_ = {}) VULKAN_HPP_NOEXCEPT
+ : maxTransformFeedbackStreams( maxTransformFeedbackStreams_ ), maxTransformFeedbackBuffers( maxTransformFeedbackBuffers_ ), maxTransformFeedbackBufferSize( maxTransformFeedbackBufferSize_ ), maxTransformFeedbackStreamDataSize( maxTransformFeedbackStreamDataSize_ ), maxTransformFeedbackBufferDataSize( maxTransformFeedbackBufferDataSize_ ), maxTransformFeedbackBufferDataStride( maxTransformFeedbackBufferDataStride_ ), transformFeedbackQueries( transformFeedbackQueries_ ), transformFeedbackStreamsLinesTriangles( transformFeedbackStreamsLinesTriangles_ ), transformFeedbackRasterizationStreamSelect( transformFeedbackRasterizationStreamSelect_ ), transformFeedbackDraw( transformFeedbackDraw_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT ) - offsetof( PhysicalDeviceTransformFeedbackPropertiesEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackPropertiesEXT( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceTransformFeedbackPropertiesEXT( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceTransformFeedbackPropertiesEXT & operator=( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT const *>( &rhs );
+ return *this;
+ }
- PhysicalDeviceTransformFeedbackPropertiesEXT& operator=( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceTransformFeedbackPropertiesEXT & operator=( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceTransformFeedbackPropertiesEXT ) );
return *this;
}
+
operator VkPhysicalDeviceTransformFeedbackPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceTransformFeedbackPropertiesEXT*>( this );
@@ -49593,6 +70522,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceTransformFeedbackPropertiesEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceTransformFeedbackPropertiesEXT const& ) const = default;
+#else
bool operator==( PhysicalDeviceTransformFeedbackPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -49613,6 +70546,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT;
@@ -49627,30 +70563,44 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackStreamsLinesTriangles = {};
VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackRasterizationStreamSelect = {};
VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackDraw = {};
+
};
static_assert( sizeof( PhysicalDeviceTransformFeedbackPropertiesEXT ) == sizeof( VkPhysicalDeviceTransformFeedbackPropertiesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceTransformFeedbackPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT>
+ {
+ using Type = PhysicalDeviceTransformFeedbackPropertiesEXT;
+ };
+
struct PhysicalDeviceUniformBufferStandardLayoutFeatures
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceUniformBufferStandardLayoutFeatures( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {} ) VULKAN_HPP_NOEXCEPT
- : uniformBufferStandardLayout( uniformBufferStandardLayout_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceUniformBufferStandardLayoutFeatures(VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {}) VULKAN_HPP_NOEXCEPT
+ : uniformBufferStandardLayout( uniformBufferStandardLayout_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures ) - offsetof( PhysicalDeviceUniformBufferStandardLayoutFeatures, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceUniformBufferStandardLayoutFeatures( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceUniformBufferStandardLayoutFeatures( VkPhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceUniformBufferStandardLayoutFeatures& operator=( VkPhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceUniformBufferStandardLayoutFeatures & operator=( VkPhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures const *>( &rhs );
+ return *this;
+ }
+
+ PhysicalDeviceUniformBufferStandardLayoutFeatures & operator=( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceUniformBufferStandardLayoutFeatures ) );
return *this;
}
@@ -49666,6 +70616,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceUniformBufferStandardLayoutFeatures const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceUniformBufferStandardLayoutFeatures*>( this );
@@ -49676,6 +70627,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceUniformBufferStandardLayoutFeatures*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceUniformBufferStandardLayoutFeatures const& ) const = default;
+#else
bool operator==( PhysicalDeviceUniformBufferStandardLayoutFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -49687,37 +70642,53 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout = {};
+
};
static_assert( sizeof( PhysicalDeviceUniformBufferStandardLayoutFeatures ) == sizeof( VkPhysicalDeviceUniformBufferStandardLayoutFeatures ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceUniformBufferStandardLayoutFeatures>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures>
+ {
+ using Type = PhysicalDeviceUniformBufferStandardLayoutFeatures;
+ };
+ using PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR = PhysicalDeviceUniformBufferStandardLayoutFeatures;
+
struct PhysicalDeviceVariablePointersFeatures
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceVariablePointersFeatures( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {} ) VULKAN_HPP_NOEXCEPT
- : variablePointersStorageBuffer( variablePointersStorageBuffer_ )
- , variablePointers( variablePointers_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVariablePointersFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceVariablePointersFeatures(VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {}) VULKAN_HPP_NOEXCEPT
+ : variablePointersStorageBuffer( variablePointersStorageBuffer_ ), variablePointers( variablePointers_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures ) - offsetof( PhysicalDeviceVariablePointersFeatures, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceVariablePointersFeatures( PhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVariablePointersFeatures( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceVariablePointersFeatures & operator=( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures const *>( &rhs );
+ return *this;
+ }
- PhysicalDeviceVariablePointersFeatures& operator=( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceVariablePointersFeatures & operator=( PhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceVariablePointersFeatures ) );
return *this;
}
@@ -49739,6 +70710,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceVariablePointersFeatures const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceVariablePointersFeatures*>( this );
@@ -49749,6 +70721,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceVariablePointersFeatures*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceVariablePointersFeatures const& ) const = default;
+#else
bool operator==( PhysicalDeviceVariablePointersFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -49761,38 +70737,56 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVariablePointersFeatures;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer = {};
VULKAN_HPP_NAMESPACE::Bool32 variablePointers = {};
+
};
static_assert( sizeof( PhysicalDeviceVariablePointersFeatures ) == sizeof( VkPhysicalDeviceVariablePointersFeatures ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceVariablePointersFeatures>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceVariablePointersFeatures>
+ {
+ using Type = PhysicalDeviceVariablePointersFeatures;
+ };
+ using PhysicalDeviceVariablePointerFeatures = PhysicalDeviceVariablePointersFeatures;
+ using PhysicalDeviceVariablePointerFeaturesKHR = PhysicalDeviceVariablePointersFeatures;
+ using PhysicalDeviceVariablePointersFeaturesKHR = PhysicalDeviceVariablePointersFeatures;
+
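     Feature structs like these are also how optional functionality is requested at device creation. A minimal sketch, assuming the caller has already filled in queue create infos and enabled extensions on the DeviceCreateInfo:

     // Hypothetical helper, not part of vulkan.hpp or of this diff.
     #include <vulkan/vulkan.hpp>

     vk::Device createDeviceWithVariablePointers( vk::PhysicalDevice physicalDevice,
                                                  vk::DeviceCreateInfo createInfo )
     {
       vk::PhysicalDeviceVariablePointersFeatures variablePointersFeatures( VK_TRUE, VK_TRUE );
       variablePointersFeatures.pNext = const_cast<void *>( createInfo.pNext );  // keep any existing chain
       createInfo.pNext = &variablePointersFeatures;
       return physicalDevice.createDevice( createInfo );
     }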
struct PhysicalDeviceVertexAttributeDivisorFeaturesEXT
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ = {} ) VULKAN_HPP_NOEXCEPT
- : vertexAttributeInstanceRateDivisor( vertexAttributeInstanceRateDivisor_ )
- , vertexAttributeInstanceRateZeroDivisor( vertexAttributeInstanceRateZeroDivisor_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ = {}) VULKAN_HPP_NOEXCEPT
+ : vertexAttributeInstanceRateDivisor( vertexAttributeInstanceRateDivisor_ ), vertexAttributeInstanceRateZeroDivisor( vertexAttributeInstanceRateZeroDivisor_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT ) - offsetof( PhysicalDeviceVertexAttributeDivisorFeaturesEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeaturesEXT( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVertexAttributeDivisorFeaturesEXT( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceVertexAttributeDivisorFeaturesEXT& operator=( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceVertexAttributeDivisorFeaturesEXT & operator=( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT const *>( &rhs );
+ return *this;
+ }
+
+ PhysicalDeviceVertexAttributeDivisorFeaturesEXT & operator=( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceVertexAttributeDivisorFeaturesEXT ) );
return *this;
}
@@ -49814,6 +70808,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT*>( this );
@@ -49824,6 +70819,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const& ) const = default;
+#else
bool operator==( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -49836,39 +70835,57 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor = {};
VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor = {};
+
};
static_assert( sizeof( PhysicalDeviceVertexAttributeDivisorFeaturesEXT ) == sizeof( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceVertexAttributeDivisorFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT>
+ {
+ using Type = PhysicalDeviceVertexAttributeDivisorFeaturesEXT;
+ };
+
struct PhysicalDeviceVertexAttributeDivisorPropertiesEXT
{
- PhysicalDeviceVertexAttributeDivisorPropertiesEXT( uint32_t maxVertexAttribDivisor_ = {} ) VULKAN_HPP_NOEXCEPT
- : maxVertexAttribDivisor( maxVertexAttribDivisor_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorPropertiesEXT(uint32_t maxVertexAttribDivisor_ = {}) VULKAN_HPP_NOEXCEPT
+ : maxVertexAttribDivisor( maxVertexAttribDivisor_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT ) - offsetof( PhysicalDeviceVertexAttributeDivisorPropertiesEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorPropertiesEXT( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVertexAttributeDivisorPropertiesEXT( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceVertexAttributeDivisorPropertiesEXT& operator=( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceVertexAttributeDivisorPropertiesEXT & operator=( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT const *>( &rhs );
return *this;
}
+ PhysicalDeviceVertexAttributeDivisorPropertiesEXT & operator=( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceVertexAttributeDivisorPropertiesEXT ) );
+ return *this;
+ }
+
+
operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT*>( this );
@@ -49879,6 +70896,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const& ) const = default;
+#else
bool operator==( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -49890,57 +70911,52 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT;
void* pNext = {};
uint32_t maxVertexAttribDivisor = {};
+
};
static_assert( sizeof( PhysicalDeviceVertexAttributeDivisorPropertiesEXT ) == sizeof( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceVertexAttributeDivisorPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT>
+ {
+ using Type = PhysicalDeviceVertexAttributeDivisorPropertiesEXT;
+ };
+
struct PhysicalDeviceVulkan11Features
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan11Features( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 multiview_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {} ) VULKAN_HPP_NOEXCEPT
- : storageBuffer16BitAccess( storageBuffer16BitAccess_ )
- , uniformAndStorageBuffer16BitAccess( uniformAndStorageBuffer16BitAccess_ )
- , storagePushConstant16( storagePushConstant16_ )
- , storageInputOutput16( storageInputOutput16_ )
- , multiview( multiview_ )
- , multiviewGeometryShader( multiviewGeometryShader_ )
- , multiviewTessellationShader( multiviewTessellationShader_ )
- , variablePointersStorageBuffer( variablePointersStorageBuffer_ )
- , variablePointers( variablePointers_ )
- , protectedMemory( protectedMemory_ )
- , samplerYcbcrConversion( samplerYcbcrConversion_ )
- , shaderDrawParameters( shaderDrawParameters_ )
- {}
-
- VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features ) - offsetof( PhysicalDeviceVulkan11Features, pNext ) );
- return *this;
- }
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan11Features;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan11Features(VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiview_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {}, VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {}, VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {}) VULKAN_HPP_NOEXCEPT
+ : storageBuffer16BitAccess( storageBuffer16BitAccess_ ), uniformAndStorageBuffer16BitAccess( uniformAndStorageBuffer16BitAccess_ ), storagePushConstant16( storagePushConstant16_ ), storageInputOutput16( storageInputOutput16_ ), multiview( multiview_ ), multiviewGeometryShader( multiviewGeometryShader_ ), multiviewTessellationShader( multiviewTessellationShader_ ), variablePointersStorageBuffer( variablePointersStorageBuffer_ ), variablePointers( variablePointers_ ), protectedMemory( protectedMemory_ ), samplerYcbcrConversion( samplerYcbcrConversion_ ), shaderDrawParameters( shaderDrawParameters_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan11Features( PhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVulkan11Features( VkPhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceVulkan11Features & operator=( VkPhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features const *>( &rhs );
+ return *this;
+ }
- PhysicalDeviceVulkan11Features& operator=( VkPhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceVulkan11Features & operator=( PhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceVulkan11Features ) );
return *this;
}
@@ -50022,6 +71038,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceVulkan11Features const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceVulkan11Features*>( this );
@@ -50032,6 +71049,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceVulkan11Features*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceVulkan11Features const& ) const = default;
+#else
bool operator==( PhysicalDeviceVulkan11Features const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -50054,6 +71075,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan11Features;
@@ -50070,160 +71094,47 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Bool32 protectedMemory = {};
VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters = {};
+
};
static_assert( sizeof( PhysicalDeviceVulkan11Features ) == sizeof( VkPhysicalDeviceVulkan11Features ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceVulkan11Features>::value, "struct wrapper is not a standard layout!" );
- struct PhysicalDeviceVulkan11Properties
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan11Features>
{
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Properties( std::array<uint8_t,VK_UUID_SIZE> const& deviceUUID_ = {},
- std::array<uint8_t,VK_UUID_SIZE> const& driverUUID_ = {},
- std::array<uint8_t,VK_LUID_SIZE> const& deviceLUID_ = {},
- uint32_t deviceNodeMask_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ = {},
- uint32_t subgroupSize_ = {},
- VULKAN_HPP_NAMESPACE::ShaderStageFlags subgroupSupportedStages_ = {},
- VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags subgroupSupportedOperations_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 subgroupQuadOperationsInAllStages_ = {},
- VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes,
- uint32_t maxMultiviewViewCount_ = {},
- uint32_t maxMultiviewInstanceIndex_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = {},
- uint32_t maxPerSetDescriptors_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {} ) VULKAN_HPP_NOEXCEPT
- : deviceUUID{}
- , driverUUID{}
- , deviceLUID{}
- , deviceNodeMask( deviceNodeMask_ )
- , deviceLUIDValid( deviceLUIDValid_ )
- , subgroupSize( subgroupSize_ )
- , subgroupSupportedStages( subgroupSupportedStages_ )
- , subgroupSupportedOperations( subgroupSupportedOperations_ )
- , subgroupQuadOperationsInAllStages( subgroupQuadOperationsInAllStages_ )
- , pointClippingBehavior( pointClippingBehavior_ )
- , maxMultiviewViewCount( maxMultiviewViewCount_ )
- , maxMultiviewInstanceIndex( maxMultiviewInstanceIndex_ )
- , protectedNoFault( protectedNoFault_ )
- , maxPerSetDescriptors( maxPerSetDescriptors_ )
- , maxMemoryAllocationSize( maxMemoryAllocationSize_ )
- {
- VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint8_t,VK_UUID_SIZE,VK_UUID_SIZE>::copy( deviceUUID, deviceUUID_ );
- VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint8_t,VK_UUID_SIZE,VK_UUID_SIZE>::copy( driverUUID, driverUUID_ );
- VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint8_t,VK_LUID_SIZE,VK_LUID_SIZE>::copy( deviceLUID, deviceLUID_ );
- }
-
- VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties ) - offsetof( PhysicalDeviceVulkan11Properties, pNext ) );
- return *this;
- }
-
- PhysicalDeviceVulkan11Properties( VkPhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
-
- PhysicalDeviceVulkan11Properties& operator=( VkPhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties const *>(&rhs);
- return *this;
- }
-
- PhysicalDeviceVulkan11Properties & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
- PhysicalDeviceVulkan11Properties & setDeviceUUID( std::array<uint8_t,VK_UUID_SIZE> deviceUUID_ ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( deviceUUID, deviceUUID_.data(), VK_UUID_SIZE * sizeof( uint8_t ) );
- return *this;
- }
-
- PhysicalDeviceVulkan11Properties & setDriverUUID( std::array<uint8_t,VK_UUID_SIZE> driverUUID_ ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( driverUUID, driverUUID_.data(), VK_UUID_SIZE * sizeof( uint8_t ) );
- return *this;
- }
-
- PhysicalDeviceVulkan11Properties & setDeviceLUID( std::array<uint8_t,VK_LUID_SIZE> deviceLUID_ ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( deviceLUID, deviceLUID_.data(), VK_LUID_SIZE * sizeof( uint8_t ) );
- return *this;
- }
-
- PhysicalDeviceVulkan11Properties & setDeviceNodeMask( uint32_t deviceNodeMask_ ) VULKAN_HPP_NOEXCEPT
- {
- deviceNodeMask = deviceNodeMask_;
- return *this;
- }
-
- PhysicalDeviceVulkan11Properties & setDeviceLUIDValid( VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ ) VULKAN_HPP_NOEXCEPT
- {
- deviceLUIDValid = deviceLUIDValid_;
- return *this;
- }
-
- PhysicalDeviceVulkan11Properties & setSubgroupSize( uint32_t subgroupSize_ ) VULKAN_HPP_NOEXCEPT
- {
- subgroupSize = subgroupSize_;
- return *this;
- }
-
- PhysicalDeviceVulkan11Properties & setSubgroupSupportedStages( VULKAN_HPP_NAMESPACE::ShaderStageFlags subgroupSupportedStages_ ) VULKAN_HPP_NOEXCEPT
- {
- subgroupSupportedStages = subgroupSupportedStages_;
- return *this;
- }
-
- PhysicalDeviceVulkan11Properties & setSubgroupSupportedOperations( VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags subgroupSupportedOperations_ ) VULKAN_HPP_NOEXCEPT
- {
- subgroupSupportedOperations = subgroupSupportedOperations_;
- return *this;
- }
+ using Type = PhysicalDeviceVulkan11Features;
+ };
- PhysicalDeviceVulkan11Properties & setSubgroupQuadOperationsInAllStages( VULKAN_HPP_NAMESPACE::Bool32 subgroupQuadOperationsInAllStages_ ) VULKAN_HPP_NOEXCEPT
- {
- subgroupQuadOperationsInAllStages = subgroupQuadOperationsInAllStages_;
- return *this;
- }
+ struct PhysicalDeviceVulkan11Properties
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan11Properties;
- PhysicalDeviceVulkan11Properties & setPointClippingBehavior( VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ ) VULKAN_HPP_NOEXCEPT
- {
- pointClippingBehavior = pointClippingBehavior_;
- return *this;
- }
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Properties(std::array<uint8_t,VK_UUID_SIZE> const& deviceUUID_ = {}, std::array<uint8_t,VK_UUID_SIZE> const& driverUUID_ = {}, std::array<uint8_t,VK_LUID_SIZE> const& deviceLUID_ = {}, uint32_t deviceNodeMask_ = {}, VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ = {}, uint32_t subgroupSize_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags subgroupSupportedStages_ = {}, VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags subgroupSupportedOperations_ = {}, VULKAN_HPP_NAMESPACE::Bool32 subgroupQuadOperationsInAllStages_ = {}, VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes, uint32_t maxMultiviewViewCount_ = {}, uint32_t maxMultiviewInstanceIndex_ = {}, VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = {}, uint32_t maxPerSetDescriptors_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {}) VULKAN_HPP_NOEXCEPT
+ : deviceUUID( deviceUUID_ ), driverUUID( driverUUID_ ), deviceLUID( deviceLUID_ ), deviceNodeMask( deviceNodeMask_ ), deviceLUIDValid( deviceLUIDValid_ ), subgroupSize( subgroupSize_ ), subgroupSupportedStages( subgroupSupportedStages_ ), subgroupSupportedOperations( subgroupSupportedOperations_ ), subgroupQuadOperationsInAllStages( subgroupQuadOperationsInAllStages_ ), pointClippingBehavior( pointClippingBehavior_ ), maxMultiviewViewCount( maxMultiviewViewCount_ ), maxMultiviewInstanceIndex( maxMultiviewInstanceIndex_ ), protectedNoFault( protectedNoFault_ ), maxPerSetDescriptors( maxPerSetDescriptors_ ), maxMemoryAllocationSize( maxMemoryAllocationSize_ )
+ {}
- PhysicalDeviceVulkan11Properties & setMaxMultiviewViewCount( uint32_t maxMultiviewViewCount_ ) VULKAN_HPP_NOEXCEPT
- {
- maxMultiviewViewCount = maxMultiviewViewCount_;
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Properties( PhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceVulkan11Properties & setMaxMultiviewInstanceIndex( uint32_t maxMultiviewInstanceIndex_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceVulkan11Properties( VkPhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- maxMultiviewInstanceIndex = maxMultiviewInstanceIndex_;
- return *this;
+ *this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceVulkan11Properties & setProtectedNoFault( VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceVulkan11Properties & operator=( VkPhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- protectedNoFault = protectedNoFault_;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties const *>( &rhs );
return *this;
}
- PhysicalDeviceVulkan11Properties & setMaxPerSetDescriptors( uint32_t maxPerSetDescriptors_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceVulkan11Properties & operator=( PhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- maxPerSetDescriptors = maxPerSetDescriptors_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceVulkan11Properties ) );
return *this;
}
- PhysicalDeviceVulkan11Properties & setMaxMemoryAllocationSize( VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ ) VULKAN_HPP_NOEXCEPT
- {
- maxMemoryAllocationSize = maxMemoryAllocationSize_;
- return *this;
- }
operator VkPhysicalDeviceVulkan11Properties const&() const VULKAN_HPP_NOEXCEPT
{
@@ -50235,13 +71146,17 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceVulkan11Properties*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceVulkan11Properties const& ) const = default;
+#else
bool operator==( PhysicalDeviceVulkan11Properties const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( memcmp( deviceUUID, rhs.deviceUUID, VK_UUID_SIZE * sizeof( uint8_t ) ) == 0 )
- && ( memcmp( driverUUID, rhs.driverUUID, VK_UUID_SIZE * sizeof( uint8_t ) ) == 0 )
- && ( memcmp( deviceLUID, rhs.deviceLUID, VK_LUID_SIZE * sizeof( uint8_t ) ) == 0 )
+ && ( deviceUUID == rhs.deviceUUID )
+ && ( driverUUID == rhs.driverUUID )
+ && ( deviceLUID == rhs.deviceLUID )
&& ( deviceNodeMask == rhs.deviceNodeMask )
&& ( deviceLUIDValid == rhs.deviceLUIDValid )
&& ( subgroupSize == rhs.subgroupSize )
@@ -50260,13 +71175,16 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan11Properties;
void* pNext = {};
- uint8_t deviceUUID[VK_UUID_SIZE] = {};
- uint8_t driverUUID[VK_UUID_SIZE] = {};
- uint8_t deviceLUID[VK_LUID_SIZE] = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> deviceUUID = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> driverUUID = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_LUID_SIZE> deviceLUID = {};
uint32_t deviceNodeMask = {};
VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid = {};
uint32_t subgroupSize = {};
@@ -50279,122 +71197,44 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault = {};
uint32_t maxPerSetDescriptors = {};
VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize = {};
+
};
static_assert( sizeof( PhysicalDeviceVulkan11Properties ) == sizeof( VkPhysicalDeviceVulkan11Properties ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceVulkan11Properties>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan11Properties>
+ {
+ using Type = PhysicalDeviceVulkan11Properties;
+ };
+
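A minimal usage sketch for the regenerated PhysicalDeviceVulkan11Properties wrapper above (not part of the diffed code itself), assuming a Vulkan 1.2 capable driver and a valid vk::PhysicalDevice handle named physicalDevice; since deviceUUID/driverUUID/deviceLUID are now ArrayWrapper1D members, they compare with == and iterate like a std::array:

    vk::PhysicalDeviceVulkan11Properties vulkan11Props;
    vk::PhysicalDeviceProperties2 props2;
    props2.pNext = &vulkan11Props;              // chain the core Vulkan 1.1 properties struct
    physicalDevice.getProperties2( &props2 );
    uint32_t subgroupSize = vulkan11Props.subgroupSize;
    for ( uint8_t byte : vulkan11Props.deviceUUID )   // ArrayWrapper1D derives from std::array, so range-for works
    {
        // e.g. format each byte of the UUID for logging
        (void)byte;
    }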
struct PhysicalDeviceVulkan12Features
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan12Features( VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId_ = {} ) VULKAN_HPP_NOEXCEPT
- : samplerMirrorClampToEdge( samplerMirrorClampToEdge_ )
- , drawIndirectCount( drawIndirectCount_ )
- , storageBuffer8BitAccess( storageBuffer8BitAccess_ )
- , uniformAndStorageBuffer8BitAccess( uniformAndStorageBuffer8BitAccess_ )
- , storagePushConstant8( storagePushConstant8_ )
- , shaderBufferInt64Atomics( shaderBufferInt64Atomics_ )
- , shaderSharedInt64Atomics( shaderSharedInt64Atomics_ )
- , shaderFloat16( shaderFloat16_ )
- , shaderInt8( shaderInt8_ )
- , descriptorIndexing( descriptorIndexing_ )
- , shaderInputAttachmentArrayDynamicIndexing( shaderInputAttachmentArrayDynamicIndexing_ )
- , shaderUniformTexelBufferArrayDynamicIndexing( shaderUniformTexelBufferArrayDynamicIndexing_ )
- , shaderStorageTexelBufferArrayDynamicIndexing( shaderStorageTexelBufferArrayDynamicIndexing_ )
- , shaderUniformBufferArrayNonUniformIndexing( shaderUniformBufferArrayNonUniformIndexing_ )
- , shaderSampledImageArrayNonUniformIndexing( shaderSampledImageArrayNonUniformIndexing_ )
- , shaderStorageBufferArrayNonUniformIndexing( shaderStorageBufferArrayNonUniformIndexing_ )
- , shaderStorageImageArrayNonUniformIndexing( shaderStorageImageArrayNonUniformIndexing_ )
- , shaderInputAttachmentArrayNonUniformIndexing( shaderInputAttachmentArrayNonUniformIndexing_ )
- , shaderUniformTexelBufferArrayNonUniformIndexing( shaderUniformTexelBufferArrayNonUniformIndexing_ )
- , shaderStorageTexelBufferArrayNonUniformIndexing( shaderStorageTexelBufferArrayNonUniformIndexing_ )
- , descriptorBindingUniformBufferUpdateAfterBind( descriptorBindingUniformBufferUpdateAfterBind_ )
- , descriptorBindingSampledImageUpdateAfterBind( descriptorBindingSampledImageUpdateAfterBind_ )
- , descriptorBindingStorageImageUpdateAfterBind( descriptorBindingStorageImageUpdateAfterBind_ )
- , descriptorBindingStorageBufferUpdateAfterBind( descriptorBindingStorageBufferUpdateAfterBind_ )
- , descriptorBindingUniformTexelBufferUpdateAfterBind( descriptorBindingUniformTexelBufferUpdateAfterBind_ )
- , descriptorBindingStorageTexelBufferUpdateAfterBind( descriptorBindingStorageTexelBufferUpdateAfterBind_ )
- , descriptorBindingUpdateUnusedWhilePending( descriptorBindingUpdateUnusedWhilePending_ )
- , descriptorBindingPartiallyBound( descriptorBindingPartiallyBound_ )
- , descriptorBindingVariableDescriptorCount( descriptorBindingVariableDescriptorCount_ )
- , runtimeDescriptorArray( runtimeDescriptorArray_ )
- , samplerFilterMinmax( samplerFilterMinmax_ )
- , scalarBlockLayout( scalarBlockLayout_ )
- , imagelessFramebuffer( imagelessFramebuffer_ )
- , uniformBufferStandardLayout( uniformBufferStandardLayout_ )
- , shaderSubgroupExtendedTypes( shaderSubgroupExtendedTypes_ )
- , separateDepthStencilLayouts( separateDepthStencilLayouts_ )
- , hostQueryReset( hostQueryReset_ )
- , timelineSemaphore( timelineSemaphore_ )
- , bufferDeviceAddress( bufferDeviceAddress_ )
- , bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ )
- , bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ )
- , vulkanMemoryModel( vulkanMemoryModel_ )
- , vulkanMemoryModelDeviceScope( vulkanMemoryModelDeviceScope_ )
- , vulkanMemoryModelAvailabilityVisibilityChains( vulkanMemoryModelAvailabilityVisibilityChains_ )
- , shaderOutputViewportIndex( shaderOutputViewportIndex_ )
- , shaderOutputLayer( shaderOutputLayer_ )
- , subgroupBroadcastDynamicId( subgroupBroadcastDynamicId_ )
- {}
-
- VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features ) - offsetof( PhysicalDeviceVulkan12Features, pNext ) );
- return *this;
- }
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan12Features;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan12Features(VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge_ = {}, VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = {}, VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = {}, VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax_ = {}, VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {}, VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ = {}, VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {}, VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId_ = {}) VULKAN_HPP_NOEXCEPT
+ : samplerMirrorClampToEdge( samplerMirrorClampToEdge_ ), drawIndirectCount( drawIndirectCount_ ), storageBuffer8BitAccess( storageBuffer8BitAccess_ ), uniformAndStorageBuffer8BitAccess( uniformAndStorageBuffer8BitAccess_ ), storagePushConstant8( storagePushConstant8_ ), shaderBufferInt64Atomics( shaderBufferInt64Atomics_ ), shaderSharedInt64Atomics( shaderSharedInt64Atomics_ ), shaderFloat16( shaderFloat16_ ), shaderInt8( shaderInt8_ ), descriptorIndexing( descriptorIndexing_ ), shaderInputAttachmentArrayDynamicIndexing( shaderInputAttachmentArrayDynamicIndexing_ ), shaderUniformTexelBufferArrayDynamicIndexing( shaderUniformTexelBufferArrayDynamicIndexing_ ), shaderStorageTexelBufferArrayDynamicIndexing( shaderStorageTexelBufferArrayDynamicIndexing_ ), shaderUniformBufferArrayNonUniformIndexing( shaderUniformBufferArrayNonUniformIndexing_ ), shaderSampledImageArrayNonUniformIndexing( shaderSampledImageArrayNonUniformIndexing_ ), shaderStorageBufferArrayNonUniformIndexing( shaderStorageBufferArrayNonUniformIndexing_ ), shaderStorageImageArrayNonUniformIndexing( shaderStorageImageArrayNonUniformIndexing_ ), shaderInputAttachmentArrayNonUniformIndexing( shaderInputAttachmentArrayNonUniformIndexing_ ), shaderUniformTexelBufferArrayNonUniformIndexing( shaderUniformTexelBufferArrayNonUniformIndexing_ ), shaderStorageTexelBufferArrayNonUniformIndexing( shaderStorageTexelBufferArrayNonUniformIndexing_ ), descriptorBindingUniformBufferUpdateAfterBind( descriptorBindingUniformBufferUpdateAfterBind_ ), descriptorBindingSampledImageUpdateAfterBind( descriptorBindingSampledImageUpdateAfterBind_ ), descriptorBindingStorageImageUpdateAfterBind( descriptorBindingStorageImageUpdateAfterBind_ ), descriptorBindingStorageBufferUpdateAfterBind( descriptorBindingStorageBufferUpdateAfterBind_ ), descriptorBindingUniformTexelBufferUpdateAfterBind( descriptorBindingUniformTexelBufferUpdateAfterBind_ ), descriptorBindingStorageTexelBufferUpdateAfterBind( descriptorBindingStorageTexelBufferUpdateAfterBind_ ), descriptorBindingUpdateUnusedWhilePending( descriptorBindingUpdateUnusedWhilePending_ ), descriptorBindingPartiallyBound( descriptorBindingPartiallyBound_ ), descriptorBindingVariableDescriptorCount( descriptorBindingVariableDescriptorCount_ ), runtimeDescriptorArray( runtimeDescriptorArray_ ), samplerFilterMinmax( samplerFilterMinmax_ ), scalarBlockLayout( scalarBlockLayout_ ), imagelessFramebuffer( imagelessFramebuffer_ ), uniformBufferStandardLayout( uniformBufferStandardLayout_ ), shaderSubgroupExtendedTypes( shaderSubgroupExtendedTypes_ ), separateDepthStencilLayouts( separateDepthStencilLayouts_ ), hostQueryReset( hostQueryReset_ ), timelineSemaphore( timelineSemaphore_ ), bufferDeviceAddress( bufferDeviceAddress_ ), bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ ), bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ ), vulkanMemoryModel( vulkanMemoryModel_ ), vulkanMemoryModelDeviceScope( vulkanMemoryModelDeviceScope_ ), vulkanMemoryModelAvailabilityVisibilityChains( vulkanMemoryModelAvailabilityVisibilityChains_ ), shaderOutputViewportIndex( shaderOutputViewportIndex_ ), shaderOutputLayer( shaderOutputLayer_ ), subgroupBroadcastDynamicId( subgroupBroadcastDynamicId_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan12Features( PhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVulkan12Features( VkPhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceVulkan12Features& operator=( VkPhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceVulkan12Features & operator=( VkPhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features const *>( &rhs );
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Features & operator=( PhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceVulkan12Features ) );
return *this;
}
@@ -50686,6 +71526,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceVulkan12Features const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceVulkan12Features*>( this );
@@ -50696,6 +71537,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceVulkan12Features*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceVulkan12Features const& ) const = default;
+#else
bool operator==( PhysicalDeviceVulkan12Features const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -50753,6 +71598,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan12Features;
@@ -50804,455 +71652,47 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer = {};
VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId = {};
+
};
static_assert( sizeof( PhysicalDeviceVulkan12Features ) == sizeof( VkPhysicalDeviceVulkan12Features ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceVulkan12Features>::value, "struct wrapper is not a standard layout!" );
- struct PhysicalDeviceVulkan12Properties
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan12Features>
{
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Properties( VULKAN_HPP_NAMESPACE::DriverId driverID_ = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary,
- std::array<char,VK_MAX_DRIVER_NAME_SIZE> const& driverName_ = {},
- std::array<char,VK_MAX_DRIVER_INFO_SIZE> const& driverInfo_ = {},
- VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {},
- VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly,
- VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly,
- VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {},
- uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {},
- uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {},
- uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {},
- uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {},
- uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {},
- uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {},
- uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {},
- uint32_t maxPerStageUpdateAfterBindResources_ = {},
- uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {},
- uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {},
- uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {},
- uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {},
- uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {},
- uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {},
- uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {},
- uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {},
- VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {},
- VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {},
- uint64_t maxTimelineSemaphoreValueDifference_ = {},
- VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts_ = {} ) VULKAN_HPP_NOEXCEPT
- : driverID( driverID_ )
- , driverName{}
- , driverInfo{}
- , conformanceVersion( conformanceVersion_ )
- , denormBehaviorIndependence( denormBehaviorIndependence_ )
- , roundingModeIndependence( roundingModeIndependence_ )
- , shaderSignedZeroInfNanPreserveFloat16( shaderSignedZeroInfNanPreserveFloat16_ )
- , shaderSignedZeroInfNanPreserveFloat32( shaderSignedZeroInfNanPreserveFloat32_ )
- , shaderSignedZeroInfNanPreserveFloat64( shaderSignedZeroInfNanPreserveFloat64_ )
- , shaderDenormPreserveFloat16( shaderDenormPreserveFloat16_ )
- , shaderDenormPreserveFloat32( shaderDenormPreserveFloat32_ )
- , shaderDenormPreserveFloat64( shaderDenormPreserveFloat64_ )
- , shaderDenormFlushToZeroFloat16( shaderDenormFlushToZeroFloat16_ )
- , shaderDenormFlushToZeroFloat32( shaderDenormFlushToZeroFloat32_ )
- , shaderDenormFlushToZeroFloat64( shaderDenormFlushToZeroFloat64_ )
- , shaderRoundingModeRTEFloat16( shaderRoundingModeRTEFloat16_ )
- , shaderRoundingModeRTEFloat32( shaderRoundingModeRTEFloat32_ )
- , shaderRoundingModeRTEFloat64( shaderRoundingModeRTEFloat64_ )
- , shaderRoundingModeRTZFloat16( shaderRoundingModeRTZFloat16_ )
- , shaderRoundingModeRTZFloat32( shaderRoundingModeRTZFloat32_ )
- , shaderRoundingModeRTZFloat64( shaderRoundingModeRTZFloat64_ )
- , maxUpdateAfterBindDescriptorsInAllPools( maxUpdateAfterBindDescriptorsInAllPools_ )
- , shaderUniformBufferArrayNonUniformIndexingNative( shaderUniformBufferArrayNonUniformIndexingNative_ )
- , shaderSampledImageArrayNonUniformIndexingNative( shaderSampledImageArrayNonUniformIndexingNative_ )
- , shaderStorageBufferArrayNonUniformIndexingNative( shaderStorageBufferArrayNonUniformIndexingNative_ )
- , shaderStorageImageArrayNonUniformIndexingNative( shaderStorageImageArrayNonUniformIndexingNative_ )
- , shaderInputAttachmentArrayNonUniformIndexingNative( shaderInputAttachmentArrayNonUniformIndexingNative_ )
- , robustBufferAccessUpdateAfterBind( robustBufferAccessUpdateAfterBind_ )
- , quadDivergentImplicitLod( quadDivergentImplicitLod_ )
- , maxPerStageDescriptorUpdateAfterBindSamplers( maxPerStageDescriptorUpdateAfterBindSamplers_ )
- , maxPerStageDescriptorUpdateAfterBindUniformBuffers( maxPerStageDescriptorUpdateAfterBindUniformBuffers_ )
- , maxPerStageDescriptorUpdateAfterBindStorageBuffers( maxPerStageDescriptorUpdateAfterBindStorageBuffers_ )
- , maxPerStageDescriptorUpdateAfterBindSampledImages( maxPerStageDescriptorUpdateAfterBindSampledImages_ )
- , maxPerStageDescriptorUpdateAfterBindStorageImages( maxPerStageDescriptorUpdateAfterBindStorageImages_ )
- , maxPerStageDescriptorUpdateAfterBindInputAttachments( maxPerStageDescriptorUpdateAfterBindInputAttachments_ )
- , maxPerStageUpdateAfterBindResources( maxPerStageUpdateAfterBindResources_ )
- , maxDescriptorSetUpdateAfterBindSamplers( maxDescriptorSetUpdateAfterBindSamplers_ )
- , maxDescriptorSetUpdateAfterBindUniformBuffers( maxDescriptorSetUpdateAfterBindUniformBuffers_ )
- , maxDescriptorSetUpdateAfterBindUniformBuffersDynamic( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ )
- , maxDescriptorSetUpdateAfterBindStorageBuffers( maxDescriptorSetUpdateAfterBindStorageBuffers_ )
- , maxDescriptorSetUpdateAfterBindStorageBuffersDynamic( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ )
- , maxDescriptorSetUpdateAfterBindSampledImages( maxDescriptorSetUpdateAfterBindSampledImages_ )
- , maxDescriptorSetUpdateAfterBindStorageImages( maxDescriptorSetUpdateAfterBindStorageImages_ )
- , maxDescriptorSetUpdateAfterBindInputAttachments( maxDescriptorSetUpdateAfterBindInputAttachments_ )
- , supportedDepthResolveModes( supportedDepthResolveModes_ )
- , supportedStencilResolveModes( supportedStencilResolveModes_ )
- , independentResolveNone( independentResolveNone_ )
- , independentResolve( independentResolve_ )
- , filterMinmaxSingleComponentFormats( filterMinmaxSingleComponentFormats_ )
- , filterMinmaxImageComponentMapping( filterMinmaxImageComponentMapping_ )
- , maxTimelineSemaphoreValueDifference( maxTimelineSemaphoreValueDifference_ )
- , framebufferIntegerColorSampleCounts( framebufferIntegerColorSampleCounts_ )
- {
- VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_DRIVER_NAME_SIZE,VK_MAX_DRIVER_NAME_SIZE>::copy( driverName, driverName_ );
- VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_DRIVER_INFO_SIZE,VK_MAX_DRIVER_INFO_SIZE>::copy( driverInfo, driverInfo_ );
- }
-
- VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties ) - offsetof( PhysicalDeviceVulkan12Properties, pNext ) );
- return *this;
- }
-
- PhysicalDeviceVulkan12Properties( VkPhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
-
- PhysicalDeviceVulkan12Properties& operator=( VkPhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties const *>(&rhs);
- return *this;
- }
-
- PhysicalDeviceVulkan12Properties & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
- PhysicalDeviceVulkan12Properties & setDriverID( VULKAN_HPP_NAMESPACE::DriverId driverID_ ) VULKAN_HPP_NOEXCEPT
- {
- driverID = driverID_;
- return *this;
- }
-
- PhysicalDeviceVulkan12Properties & setDriverName( std::array<char,VK_MAX_DRIVER_NAME_SIZE> driverName_ ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( driverName, driverName_.data(), VK_MAX_DRIVER_NAME_SIZE * sizeof( char ) );
- return *this;
- }
-
- PhysicalDeviceVulkan12Properties & setDriverInfo( std::array<char,VK_MAX_DRIVER_INFO_SIZE> driverInfo_ ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( driverInfo, driverInfo_.data(), VK_MAX_DRIVER_INFO_SIZE * sizeof( char ) );
- return *this;
- }
-
- PhysicalDeviceVulkan12Properties & setConformanceVersion( VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ ) VULKAN_HPP_NOEXCEPT
- {
- conformanceVersion = conformanceVersion_;
- return *this;
- }
-
- PhysicalDeviceVulkan12Properties & setDenormBehaviorIndependence( VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ ) VULKAN_HPP_NOEXCEPT
- {
- denormBehaviorIndependence = denormBehaviorIndependence_;
- return *this;
- }
-
- PhysicalDeviceVulkan12Properties & setRoundingModeIndependence( VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ ) VULKAN_HPP_NOEXCEPT
- {
- roundingModeIndependence = roundingModeIndependence_;
- return *this;
- }
-
- PhysicalDeviceVulkan12Properties & setShaderSignedZeroInfNanPreserveFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ ) VULKAN_HPP_NOEXCEPT
- {
- shaderSignedZeroInfNanPreserveFloat16 = shaderSignedZeroInfNanPreserveFloat16_;
- return *this;
- }
-
- PhysicalDeviceVulkan12Properties & setShaderSignedZeroInfNanPreserveFloat32( VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ ) VULKAN_HPP_NOEXCEPT
- {
- shaderSignedZeroInfNanPreserveFloat32 = shaderSignedZeroInfNanPreserveFloat32_;
- return *this;
- }
-
- PhysicalDeviceVulkan12Properties & setShaderSignedZeroInfNanPreserveFloat64( VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ ) VULKAN_HPP_NOEXCEPT
- {
- shaderSignedZeroInfNanPreserveFloat64 = shaderSignedZeroInfNanPreserveFloat64_;
- return *this;
- }
-
- PhysicalDeviceVulkan12Properties & setShaderDenormPreserveFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ ) VULKAN_HPP_NOEXCEPT
- {
- shaderDenormPreserveFloat16 = shaderDenormPreserveFloat16_;
- return *this;
- }
-
- PhysicalDeviceVulkan12Properties & setShaderDenormPreserveFloat32( VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ ) VULKAN_HPP_NOEXCEPT
- {
- shaderDenormPreserveFloat32 = shaderDenormPreserveFloat32_;
- return *this;
- }
-
- PhysicalDeviceVulkan12Properties & setShaderDenormPreserveFloat64( VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ ) VULKAN_HPP_NOEXCEPT
- {
- shaderDenormPreserveFloat64 = shaderDenormPreserveFloat64_;
- return *this;
- }
-
- PhysicalDeviceVulkan12Properties & setShaderDenormFlushToZeroFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ ) VULKAN_HPP_NOEXCEPT
- {
- shaderDenormFlushToZeroFloat16 = shaderDenormFlushToZeroFloat16_;
- return *this;
- }
-
- PhysicalDeviceVulkan12Properties & setShaderDenormFlushToZeroFloat32( VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ ) VULKAN_HPP_NOEXCEPT
- {
- shaderDenormFlushToZeroFloat32 = shaderDenormFlushToZeroFloat32_;
- return *this;
- }
-
- PhysicalDeviceVulkan12Properties & setShaderDenormFlushToZeroFloat64( VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ ) VULKAN_HPP_NOEXCEPT
- {
- shaderDenormFlushToZeroFloat64 = shaderDenormFlushToZeroFloat64_;
- return *this;
- }
-
- PhysicalDeviceVulkan12Properties & setShaderRoundingModeRTEFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ ) VULKAN_HPP_NOEXCEPT
- {
- shaderRoundingModeRTEFloat16 = shaderRoundingModeRTEFloat16_;
- return *this;
- }
-
- PhysicalDeviceVulkan12Properties & setShaderRoundingModeRTEFloat32( VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ ) VULKAN_HPP_NOEXCEPT
- {
- shaderRoundingModeRTEFloat32 = shaderRoundingModeRTEFloat32_;
- return *this;
- }
-
- PhysicalDeviceVulkan12Properties & setShaderRoundingModeRTEFloat64( VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ ) VULKAN_HPP_NOEXCEPT
- {
- shaderRoundingModeRTEFloat64 = shaderRoundingModeRTEFloat64_;
- return *this;
- }
-
- PhysicalDeviceVulkan12Properties & setShaderRoundingModeRTZFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ ) VULKAN_HPP_NOEXCEPT
- {
- shaderRoundingModeRTZFloat16 = shaderRoundingModeRTZFloat16_;
- return *this;
- }
-
- PhysicalDeviceVulkan12Properties & setShaderRoundingModeRTZFloat32( VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ ) VULKAN_HPP_NOEXCEPT
- {
- shaderRoundingModeRTZFloat32 = shaderRoundingModeRTZFloat32_;
- return *this;
- }
-
- PhysicalDeviceVulkan12Properties & setShaderRoundingModeRTZFloat64( VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ ) VULKAN_HPP_NOEXCEPT
- {
- shaderRoundingModeRTZFloat64 = shaderRoundingModeRTZFloat64_;
- return *this;
- }
-
- PhysicalDeviceVulkan12Properties & setMaxUpdateAfterBindDescriptorsInAllPools( uint32_t maxUpdateAfterBindDescriptorsInAllPools_ ) VULKAN_HPP_NOEXCEPT
- {
- maxUpdateAfterBindDescriptorsInAllPools = maxUpdateAfterBindDescriptorsInAllPools_;
- return *this;
- }
-
- PhysicalDeviceVulkan12Properties & setShaderUniformBufferArrayNonUniformIndexingNative( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ ) VULKAN_HPP_NOEXCEPT
- {
- shaderUniformBufferArrayNonUniformIndexingNative = shaderUniformBufferArrayNonUniformIndexingNative_;
- return *this;
- }
-
- PhysicalDeviceVulkan12Properties & setShaderSampledImageArrayNonUniformIndexingNative( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ ) VULKAN_HPP_NOEXCEPT
- {
- shaderSampledImageArrayNonUniformIndexingNative = shaderSampledImageArrayNonUniformIndexingNative_;
- return *this;
- }
-
- PhysicalDeviceVulkan12Properties & setShaderStorageBufferArrayNonUniformIndexingNative( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ ) VULKAN_HPP_NOEXCEPT
- {
- shaderStorageBufferArrayNonUniformIndexingNative = shaderStorageBufferArrayNonUniformIndexingNative_;
- return *this;
- }
-
- PhysicalDeviceVulkan12Properties & setShaderStorageImageArrayNonUniformIndexingNative( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ ) VULKAN_HPP_NOEXCEPT
- {
- shaderStorageImageArrayNonUniformIndexingNative = shaderStorageImageArrayNonUniformIndexingNative_;
- return *this;
- }
-
- PhysicalDeviceVulkan12Properties & setShaderInputAttachmentArrayNonUniformIndexingNative( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ ) VULKAN_HPP_NOEXCEPT
- {
- shaderInputAttachmentArrayNonUniformIndexingNative = shaderInputAttachmentArrayNonUniformIndexingNative_;
- return *this;
- }
-
- PhysicalDeviceVulkan12Properties & setRobustBufferAccessUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
- {
- robustBufferAccessUpdateAfterBind = robustBufferAccessUpdateAfterBind_;
- return *this;
- }
-
- PhysicalDeviceVulkan12Properties & setQuadDivergentImplicitLod( VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ ) VULKAN_HPP_NOEXCEPT
- {
- quadDivergentImplicitLod = quadDivergentImplicitLod_;
- return *this;
- }
-
- PhysicalDeviceVulkan12Properties & setMaxPerStageDescriptorUpdateAfterBindSamplers( uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ ) VULKAN_HPP_NOEXCEPT
- {
- maxPerStageDescriptorUpdateAfterBindSamplers = maxPerStageDescriptorUpdateAfterBindSamplers_;
- return *this;
- }
-
- PhysicalDeviceVulkan12Properties & setMaxPerStageDescriptorUpdateAfterBindUniformBuffers( uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ ) VULKAN_HPP_NOEXCEPT
- {
- maxPerStageDescriptorUpdateAfterBindUniformBuffers = maxPerStageDescriptorUpdateAfterBindUniformBuffers_;
- return *this;
- }
-
- PhysicalDeviceVulkan12Properties & setMaxPerStageDescriptorUpdateAfterBindStorageBuffers( uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ ) VULKAN_HPP_NOEXCEPT
- {
- maxPerStageDescriptorUpdateAfterBindStorageBuffers = maxPerStageDescriptorUpdateAfterBindStorageBuffers_;
- return *this;
- }
-
- PhysicalDeviceVulkan12Properties & setMaxPerStageDescriptorUpdateAfterBindSampledImages( uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ ) VULKAN_HPP_NOEXCEPT
- {
- maxPerStageDescriptorUpdateAfterBindSampledImages = maxPerStageDescriptorUpdateAfterBindSampledImages_;
- return *this;
- }
-
- PhysicalDeviceVulkan12Properties & setMaxPerStageDescriptorUpdateAfterBindStorageImages( uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ ) VULKAN_HPP_NOEXCEPT
- {
- maxPerStageDescriptorUpdateAfterBindStorageImages = maxPerStageDescriptorUpdateAfterBindStorageImages_;
- return *this;
- }
-
- PhysicalDeviceVulkan12Properties & setMaxPerStageDescriptorUpdateAfterBindInputAttachments( uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ ) VULKAN_HPP_NOEXCEPT
- {
- maxPerStageDescriptorUpdateAfterBindInputAttachments = maxPerStageDescriptorUpdateAfterBindInputAttachments_;
- return *this;
- }
-
- PhysicalDeviceVulkan12Properties & setMaxPerStageUpdateAfterBindResources( uint32_t maxPerStageUpdateAfterBindResources_ ) VULKAN_HPP_NOEXCEPT
- {
- maxPerStageUpdateAfterBindResources = maxPerStageUpdateAfterBindResources_;
- return *this;
- }
-
- PhysicalDeviceVulkan12Properties & setMaxDescriptorSetUpdateAfterBindSamplers( uint32_t maxDescriptorSetUpdateAfterBindSamplers_ ) VULKAN_HPP_NOEXCEPT
- {
- maxDescriptorSetUpdateAfterBindSamplers = maxDescriptorSetUpdateAfterBindSamplers_;
- return *this;
- }
-
- PhysicalDeviceVulkan12Properties & setMaxDescriptorSetUpdateAfterBindUniformBuffers( uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ ) VULKAN_HPP_NOEXCEPT
- {
- maxDescriptorSetUpdateAfterBindUniformBuffers = maxDescriptorSetUpdateAfterBindUniformBuffers_;
- return *this;
- }
-
- PhysicalDeviceVulkan12Properties & setMaxDescriptorSetUpdateAfterBindUniformBuffersDynamic( uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ ) VULKAN_HPP_NOEXCEPT
- {
- maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_;
- return *this;
- }
-
- PhysicalDeviceVulkan12Properties & setMaxDescriptorSetUpdateAfterBindStorageBuffers( uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ ) VULKAN_HPP_NOEXCEPT
- {
- maxDescriptorSetUpdateAfterBindStorageBuffers = maxDescriptorSetUpdateAfterBindStorageBuffers_;
- return *this;
- }
-
- PhysicalDeviceVulkan12Properties & setMaxDescriptorSetUpdateAfterBindStorageBuffersDynamic( uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ ) VULKAN_HPP_NOEXCEPT
- {
- maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_;
- return *this;
- }
-
- PhysicalDeviceVulkan12Properties & setMaxDescriptorSetUpdateAfterBindSampledImages( uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ ) VULKAN_HPP_NOEXCEPT
- {
- maxDescriptorSetUpdateAfterBindSampledImages = maxDescriptorSetUpdateAfterBindSampledImages_;
- return *this;
- }
-
- PhysicalDeviceVulkan12Properties & setMaxDescriptorSetUpdateAfterBindStorageImages( uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ ) VULKAN_HPP_NOEXCEPT
- {
- maxDescriptorSetUpdateAfterBindStorageImages = maxDescriptorSetUpdateAfterBindStorageImages_;
- return *this;
- }
-
- PhysicalDeviceVulkan12Properties & setMaxDescriptorSetUpdateAfterBindInputAttachments( uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ ) VULKAN_HPP_NOEXCEPT
- {
- maxDescriptorSetUpdateAfterBindInputAttachments = maxDescriptorSetUpdateAfterBindInputAttachments_;
- return *this;
- }
+ using Type = PhysicalDeviceVulkan12Features;
+ };
- PhysicalDeviceVulkan12Properties & setSupportedDepthResolveModes( VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ ) VULKAN_HPP_NOEXCEPT
- {
- supportedDepthResolveModes = supportedDepthResolveModes_;
- return *this;
- }
+ struct PhysicalDeviceVulkan12Properties
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan12Properties;
- PhysicalDeviceVulkan12Properties & setSupportedStencilResolveModes( VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ ) VULKAN_HPP_NOEXCEPT
- {
- supportedStencilResolveModes = supportedStencilResolveModes_;
- return *this;
- }
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Properties(VULKAN_HPP_NAMESPACE::DriverId driverID_ = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary, std::array<char,VK_MAX_DRIVER_NAME_SIZE> const& driverName_ = {}, std::array<char,VK_MAX_DRIVER_INFO_SIZE> const& driverInfo_ = {}, VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {}, VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {}, uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {}, uint32_t maxPerStageUpdateAfterBindResources_ = {}, uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {}, uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {}, uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {}, uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {}, uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {}, VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {}, VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {}, VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {}, VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {}, VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {}, uint64_t maxTimelineSemaphoreValueDifference_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts_ = {}) VULKAN_HPP_NOEXCEPT
+ : driverID( driverID_ ), driverName( driverName_ ), driverInfo( driverInfo_ ), conformanceVersion( conformanceVersion_ ), denormBehaviorIndependence( denormBehaviorIndependence_ ), roundingModeIndependence( roundingModeIndependence_ ), shaderSignedZeroInfNanPreserveFloat16( shaderSignedZeroInfNanPreserveFloat16_ ), shaderSignedZeroInfNanPreserveFloat32( shaderSignedZeroInfNanPreserveFloat32_ ), shaderSignedZeroInfNanPreserveFloat64( shaderSignedZeroInfNanPreserveFloat64_ ), shaderDenormPreserveFloat16( shaderDenormPreserveFloat16_ ), shaderDenormPreserveFloat32( shaderDenormPreserveFloat32_ ), shaderDenormPreserveFloat64( shaderDenormPreserveFloat64_ ), shaderDenormFlushToZeroFloat16( shaderDenormFlushToZeroFloat16_ ), shaderDenormFlushToZeroFloat32( shaderDenormFlushToZeroFloat32_ ), shaderDenormFlushToZeroFloat64( shaderDenormFlushToZeroFloat64_ ), shaderRoundingModeRTEFloat16( shaderRoundingModeRTEFloat16_ ), shaderRoundingModeRTEFloat32( shaderRoundingModeRTEFloat32_ ), shaderRoundingModeRTEFloat64( shaderRoundingModeRTEFloat64_ ), shaderRoundingModeRTZFloat16( shaderRoundingModeRTZFloat16_ ), shaderRoundingModeRTZFloat32( shaderRoundingModeRTZFloat32_ ), shaderRoundingModeRTZFloat64( shaderRoundingModeRTZFloat64_ ), maxUpdateAfterBindDescriptorsInAllPools( maxUpdateAfterBindDescriptorsInAllPools_ ), shaderUniformBufferArrayNonUniformIndexingNative( shaderUniformBufferArrayNonUniformIndexingNative_ ), shaderSampledImageArrayNonUniformIndexingNative( shaderSampledImageArrayNonUniformIndexingNative_ ), shaderStorageBufferArrayNonUniformIndexingNative( shaderStorageBufferArrayNonUniformIndexingNative_ ), shaderStorageImageArrayNonUniformIndexingNative( shaderStorageImageArrayNonUniformIndexingNative_ ), shaderInputAttachmentArrayNonUniformIndexingNative( shaderInputAttachmentArrayNonUniformIndexingNative_ ), robustBufferAccessUpdateAfterBind( robustBufferAccessUpdateAfterBind_ ), quadDivergentImplicitLod( quadDivergentImplicitLod_ ), maxPerStageDescriptorUpdateAfterBindSamplers( maxPerStageDescriptorUpdateAfterBindSamplers_ ), maxPerStageDescriptorUpdateAfterBindUniformBuffers( maxPerStageDescriptorUpdateAfterBindUniformBuffers_ ), maxPerStageDescriptorUpdateAfterBindStorageBuffers( maxPerStageDescriptorUpdateAfterBindStorageBuffers_ ), maxPerStageDescriptorUpdateAfterBindSampledImages( maxPerStageDescriptorUpdateAfterBindSampledImages_ ), maxPerStageDescriptorUpdateAfterBindStorageImages( maxPerStageDescriptorUpdateAfterBindStorageImages_ ), maxPerStageDescriptorUpdateAfterBindInputAttachments( maxPerStageDescriptorUpdateAfterBindInputAttachments_ ), maxPerStageUpdateAfterBindResources( maxPerStageUpdateAfterBindResources_ ), maxDescriptorSetUpdateAfterBindSamplers( maxDescriptorSetUpdateAfterBindSamplers_ ), maxDescriptorSetUpdateAfterBindUniformBuffers( maxDescriptorSetUpdateAfterBindUniformBuffers_ ), maxDescriptorSetUpdateAfterBindUniformBuffersDynamic( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ ), maxDescriptorSetUpdateAfterBindStorageBuffers( maxDescriptorSetUpdateAfterBindStorageBuffers_ ), maxDescriptorSetUpdateAfterBindStorageBuffersDynamic( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ ), maxDescriptorSetUpdateAfterBindSampledImages( maxDescriptorSetUpdateAfterBindSampledImages_ ), maxDescriptorSetUpdateAfterBindStorageImages( maxDescriptorSetUpdateAfterBindStorageImages_ ), maxDescriptorSetUpdateAfterBindInputAttachments( maxDescriptorSetUpdateAfterBindInputAttachments_ ), supportedDepthResolveModes( supportedDepthResolveModes_ ), supportedStencilResolveModes( supportedStencilResolveModes_ ), independentResolveNone( independentResolveNone_ ), independentResolve( independentResolve_ ), filterMinmaxSingleComponentFormats( filterMinmaxSingleComponentFormats_ ), filterMinmaxImageComponentMapping( filterMinmaxImageComponentMapping_ ), maxTimelineSemaphoreValueDifference( maxTimelineSemaphoreValueDifference_ ), framebufferIntegerColorSampleCounts( framebufferIntegerColorSampleCounts_ )
+ {}
- PhysicalDeviceVulkan12Properties & setIndependentResolveNone( VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ ) VULKAN_HPP_NOEXCEPT
- {
- independentResolveNone = independentResolveNone_;
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Properties( PhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceVulkan12Properties & setIndependentResolve( VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceVulkan12Properties( VkPhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- independentResolve = independentResolve_;
- return *this;
+ *this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PhysicalDeviceVulkan12Properties & setFilterMinmaxSingleComponentFormats( VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceVulkan12Properties & operator=( VkPhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- filterMinmaxSingleComponentFormats = filterMinmaxSingleComponentFormats_;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties const *>( &rhs );
return *this;
}
- PhysicalDeviceVulkan12Properties & setFilterMinmaxImageComponentMapping( VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceVulkan12Properties & operator=( PhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- filterMinmaxImageComponentMapping = filterMinmaxImageComponentMapping_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceVulkan12Properties ) );
return *this;
}
- PhysicalDeviceVulkan12Properties & setMaxTimelineSemaphoreValueDifference( uint64_t maxTimelineSemaphoreValueDifference_ ) VULKAN_HPP_NOEXCEPT
- {
- maxTimelineSemaphoreValueDifference = maxTimelineSemaphoreValueDifference_;
- return *this;
- }
-
- PhysicalDeviceVulkan12Properties & setFramebufferIntegerColorSampleCounts( VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts_ ) VULKAN_HPP_NOEXCEPT
- {
- framebufferIntegerColorSampleCounts = framebufferIntegerColorSampleCounts_;
- return *this;
- }
operator VkPhysicalDeviceVulkan12Properties const&() const VULKAN_HPP_NOEXCEPT
{
@@ -51264,13 +71704,17 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceVulkan12Properties*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceVulkan12Properties const& ) const = default;
+#else
bool operator==( PhysicalDeviceVulkan12Properties const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( driverID == rhs.driverID )
- && ( memcmp( driverName, rhs.driverName, VK_MAX_DRIVER_NAME_SIZE * sizeof( char ) ) == 0 )
- && ( memcmp( driverInfo, rhs.driverInfo, VK_MAX_DRIVER_INFO_SIZE * sizeof( char ) ) == 0 )
+ && ( driverName == rhs.driverName )
+ && ( driverInfo == rhs.driverInfo )
&& ( conformanceVersion == rhs.conformanceVersion )
&& ( denormBehaviorIndependence == rhs.denormBehaviorIndependence )
&& ( roundingModeIndependence == rhs.roundingModeIndependence )
@@ -51326,13 +71770,16 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan12Properties;
void* pNext = {};
VULKAN_HPP_NAMESPACE::DriverId driverID = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary;
- char driverName[VK_MAX_DRIVER_NAME_SIZE] = {};
- char driverInfo[VK_MAX_DRIVER_INFO_SIZE] = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_NAME_SIZE> driverName = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_INFO_SIZE> driverInfo = {};
VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion = {};
VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly;
VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly;
@@ -51382,34 +71829,44 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping = {};
uint64_t maxTimelineSemaphoreValueDifference = {};
VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts = {};
+
};
static_assert( sizeof( PhysicalDeviceVulkan12Properties ) == sizeof( VkPhysicalDeviceVulkan12Properties ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceVulkan12Properties>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan12Properties>
+ {
+ using Type = PhysicalDeviceVulkan12Properties;
+ };
+
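
For context, a minimal sketch of how the reworked PhysicalDeviceVulkan12Properties wrapper above can be consumed once driverName and driverInfo are ArrayWrapper1D members rather than raw char arrays. It assumes the structure-chain overload of PhysicalDevice::getProperties2 defined elsewhere in this header and the default vk namespace; the function and variable names are illustrative only.

    #include <cstdio>
    #include <vulkan/vulkan.hpp>

    // Illustrative only: print the driver name reported in the Vulkan 1.2 properties.
    void printDriverName( vk::PhysicalDevice physicalDevice )
    {
      auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
                                                 vk::PhysicalDeviceVulkan12Properties>();
      auto const & props = chain.get<vk::PhysicalDeviceVulkan12Properties>();

      // driverName is now an ArrayWrapper1D<char, VK_MAX_DRIVER_NAME_SIZE>; it keeps the
      // std::array interface, so data() still yields the NUL-terminated UTF-8 string, and
      // operator== on the whole struct compares the wrapped arrays directly instead of
      // calling memcmp, as the rewritten comparison above shows.
      std::printf( "driver: %s\n", props.driverName.data() );
    }
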
struct PhysicalDeviceVulkanMemoryModelFeatures
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeatures( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {} ) VULKAN_HPP_NOEXCEPT
- : vulkanMemoryModel( vulkanMemoryModel_ )
- , vulkanMemoryModelDeviceScope( vulkanMemoryModelDeviceScope_ )
- , vulkanMemoryModelAvailabilityVisibilityChains( vulkanMemoryModelAvailabilityVisibilityChains_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkanMemoryModelFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeatures(VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {}) VULKAN_HPP_NOEXCEPT
+ : vulkanMemoryModel( vulkanMemoryModel_ ), vulkanMemoryModelDeviceScope( vulkanMemoryModelDeviceScope_ ), vulkanMemoryModelAvailabilityVisibilityChains( vulkanMemoryModelAvailabilityVisibilityChains_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures ) - offsetof( PhysicalDeviceVulkanMemoryModelFeatures, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeatures( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVulkanMemoryModelFeatures( VkPhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceVulkanMemoryModelFeatures & operator=( VkPhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures const *>( &rhs );
+ return *this;
+ }
- PhysicalDeviceVulkanMemoryModelFeatures& operator=( VkPhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceVulkanMemoryModelFeatures & operator=( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceVulkanMemoryModelFeatures ) );
return *this;
}
@@ -51437,6 +71894,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceVulkanMemoryModelFeatures const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceVulkanMemoryModelFeatures*>( this );
@@ -51447,6 +71905,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceVulkanMemoryModelFeatures*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceVulkanMemoryModelFeatures const& ) const = default;
+#else
bool operator==( PhysicalDeviceVulkanMemoryModelFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -51460,6 +71922,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkanMemoryModelFeatures;
@@ -51467,30 +71932,45 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel = {};
VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope = {};
VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains = {};
+
};
static_assert( sizeof( PhysicalDeviceVulkanMemoryModelFeatures ) == sizeof( VkPhysicalDeviceVulkanMemoryModelFeatures ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceVulkanMemoryModelFeatures>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceVulkanMemoryModelFeatures>
+ {
+ using Type = PhysicalDeviceVulkanMemoryModelFeatures;
+ };
+ using PhysicalDeviceVulkanMemoryModelFeaturesKHR = PhysicalDeviceVulkanMemoryModelFeatures;
+
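
The CppType specializations introduced above (together with aliases such as PhysicalDeviceVulkanMemoryModelFeaturesKHR) give a compile-time mapping from a StructureType value back to its C++ wrapper. A tiny sketch of what that enables; the alias name MemoryModelFeatures is purely illustrative:

    #include <type_traits>
    #include <vulkan/vulkan.hpp>

    // Look up the wrapper type from its sType enumerant through the new CppType trait.
    using MemoryModelFeatures =
      vk::CppType<vk::StructureType, vk::StructureType::ePhysicalDeviceVulkanMemoryModelFeatures>::Type;

    static_assert( std::is_same<MemoryModelFeatures, vk::PhysicalDeviceVulkanMemoryModelFeatures>::value,
                   "CppType maps the enum back to the wrapper struct" );
    static_assert( std::is_same<MemoryModelFeatures, vk::PhysicalDeviceVulkanMemoryModelFeaturesKHR>::value,
                   "the KHR name is now just an alias for the core struct" );
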
struct PhysicalDeviceYcbcrImageArraysFeaturesEXT
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays_ = {} ) VULKAN_HPP_NOEXCEPT
- : ycbcrImageArrays( ycbcrImageArrays_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays_ = {}) VULKAN_HPP_NOEXCEPT
+ : ycbcrImageArrays( ycbcrImageArrays_ )
{}
- VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT ) - offsetof( PhysicalDeviceYcbcrImageArraysFeaturesEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceYcbcrImageArraysFeaturesEXT( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PhysicalDeviceYcbcrImageArraysFeaturesEXT & operator=( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT const *>( &rhs );
+ return *this;
+ }
- PhysicalDeviceYcbcrImageArraysFeaturesEXT& operator=( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceYcbcrImageArraysFeaturesEXT & operator=( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceYcbcrImageArraysFeaturesEXT ) );
return *this;
}
@@ -51506,6 +71986,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceYcbcrImageArraysFeaturesEXT*>( this );
@@ -51516,6 +71997,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPhysicalDeviceYcbcrImageArraysFeaturesEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceYcbcrImageArraysFeaturesEXT const& ) const = default;
+#else
bool operator==( PhysicalDeviceYcbcrImageArraysFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -51527,124 +72012,52 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays = {};
+
};
static_assert( sizeof( PhysicalDeviceYcbcrImageArraysFeaturesEXT ) == sizeof( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceYcbcrImageArraysFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
- struct PipelineCacheCreateInfo
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT>
{
- VULKAN_HPP_CONSTEXPR PipelineCacheCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_ = {},
- size_t initialDataSize_ = {},
- const void* pInitialData_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , initialDataSize( initialDataSize_ )
- , pInitialData( pInitialData_ )
- {}
-
- VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo ) - offsetof( PipelineCacheCreateInfo, pNext ) );
- return *this;
- }
-
- PipelineCacheCreateInfo( VkPipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
-
- PipelineCacheCreateInfo& operator=( VkPipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const *>(&rhs);
- return *this;
- }
-
- PipelineCacheCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
- PipelineCacheCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
- {
- flags = flags_;
- return *this;
- }
-
- PipelineCacheCreateInfo & setInitialDataSize( size_t initialDataSize_ ) VULKAN_HPP_NOEXCEPT
- {
- initialDataSize = initialDataSize_;
- return *this;
- }
-
- PipelineCacheCreateInfo & setPInitialData( const void* pInitialData_ ) VULKAN_HPP_NOEXCEPT
- {
- pInitialData = pInitialData_;
- return *this;
- }
-
- operator VkPipelineCacheCreateInfo const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkPipelineCacheCreateInfo*>( this );
- }
-
- operator VkPipelineCacheCreateInfo &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkPipelineCacheCreateInfo*>( this );
- }
-
- bool operator==( PipelineCacheCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( flags == rhs.flags )
- && ( initialDataSize == rhs.initialDataSize )
- && ( pInitialData == rhs.pInitialData );
- }
-
- bool operator!=( PipelineCacheCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-
- public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCacheCreateInfo;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags = {};
- size_t initialDataSize = {};
- const void* pInitialData = {};
+ using Type = PhysicalDeviceYcbcrImageArraysFeaturesEXT;
};
- static_assert( sizeof( PipelineCacheCreateInfo ) == sizeof( VkPipelineCacheCreateInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PipelineCacheCreateInfo>::value, "struct wrapper is not a standard layout!" );
struct PipelineColorBlendAdvancedStateCreateInfoEXT
{
- VULKAN_HPP_CONSTEXPR PipelineColorBlendAdvancedStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ = {},
- VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated ) VULKAN_HPP_NOEXCEPT
- : srcPremultiplied( srcPremultiplied_ )
- , dstPremultiplied( dstPremultiplied_ )
- , blendOverlap( blendOverlap_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PipelineColorBlendAdvancedStateCreateInfoEXT(VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ = {}, VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ = {}, VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated) VULKAN_HPP_NOEXCEPT
+ : srcPremultiplied( srcPremultiplied_ ), dstPremultiplied( dstPremultiplied_ ), blendOverlap( blendOverlap_ )
{}
- VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT ) - offsetof( PipelineColorBlendAdvancedStateCreateInfoEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PipelineColorBlendAdvancedStateCreateInfoEXT( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineColorBlendAdvancedStateCreateInfoEXT( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PipelineColorBlendAdvancedStateCreateInfoEXT & operator=( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT const *>( &rhs );
+ return *this;
+ }
- PipelineColorBlendAdvancedStateCreateInfoEXT& operator=( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineColorBlendAdvancedStateCreateInfoEXT & operator=( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineColorBlendAdvancedStateCreateInfoEXT ) );
return *this;
}
@@ -51672,6 +72085,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPipelineColorBlendAdvancedStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineColorBlendAdvancedStateCreateInfoEXT*>( this );
@@ -51682,6 +72096,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPipelineColorBlendAdvancedStateCreateInfoEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PipelineColorBlendAdvancedStateCreateInfoEXT const& ) const = default;
+#else
bool operator==( PipelineColorBlendAdvancedStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -51695,6 +72113,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT;
@@ -51702,30 +72123,44 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied = {};
VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied = {};
VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated;
+
};
static_assert( sizeof( PipelineColorBlendAdvancedStateCreateInfoEXT ) == sizeof( VkPipelineColorBlendAdvancedStateCreateInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineColorBlendAdvancedStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT>
+ {
+ using Type = PipelineColorBlendAdvancedStateCreateInfoEXT;
+ };
+
struct PipelineCompilerControlCreateInfoAMD
{
- VULKAN_HPP_CONSTEXPR PipelineCompilerControlCreateInfoAMD( VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags_ = {} ) VULKAN_HPP_NOEXCEPT
- : compilerControlFlags( compilerControlFlags_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCompilerControlCreateInfoAMD;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PipelineCompilerControlCreateInfoAMD(VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags_ = {}) VULKAN_HPP_NOEXCEPT
+ : compilerControlFlags( compilerControlFlags_ )
{}
- VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD & operator=( VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD ) - offsetof( PipelineCompilerControlCreateInfoAMD, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PipelineCompilerControlCreateInfoAMD( PipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineCompilerControlCreateInfoAMD( VkPipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PipelineCompilerControlCreateInfoAMD & operator=( VkPipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD const *>( &rhs );
+ return *this;
+ }
- PipelineCompilerControlCreateInfoAMD& operator=( VkPipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineCompilerControlCreateInfoAMD & operator=( PipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineCompilerControlCreateInfoAMD ) );
return *this;
}
@@ -51741,6 +72176,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPipelineCompilerControlCreateInfoAMD const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineCompilerControlCreateInfoAMD*>( this );
@@ -51751,6 +72187,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPipelineCompilerControlCreateInfoAMD*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PipelineCompilerControlCreateInfoAMD const& ) const = default;
+#else
bool operator==( PipelineCompilerControlCreateInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -51762,43 +72202,58 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCompilerControlCreateInfoAMD;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags = {};
+
};
static_assert( sizeof( PipelineCompilerControlCreateInfoAMD ) == sizeof( VkPipelineCompilerControlCreateInfoAMD ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineCompilerControlCreateInfoAMD>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePipelineCompilerControlCreateInfoAMD>
+ {
+ using Type = PipelineCompilerControlCreateInfoAMD;
+ };
+
struct PipelineCoverageModulationStateCreateInfoNV
{
- VULKAN_HPP_CONSTEXPR PipelineCoverageModulationStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_ = {},
- VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_ = VULKAN_HPP_NAMESPACE::CoverageModulationModeNV::eNone,
- VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_ = {},
- uint32_t coverageModulationTableCount_ = {},
- const float* pCoverageModulationTable_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , coverageModulationMode( coverageModulationMode_ )
- , coverageModulationTableEnable( coverageModulationTableEnable_ )
- , coverageModulationTableCount( coverageModulationTableCount_ )
- , pCoverageModulationTable( pCoverageModulationTable_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCoverageModulationStateCreateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PipelineCoverageModulationStateCreateInfoNV(VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_ = {}, VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_ = VULKAN_HPP_NAMESPACE::CoverageModulationModeNV::eNone, VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_ = {}, uint32_t coverageModulationTableCount_ = {}, const float* pCoverageModulationTable_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), coverageModulationMode( coverageModulationMode_ ), coverageModulationTableEnable( coverageModulationTableEnable_ ), coverageModulationTableCount( coverageModulationTableCount_ ), pCoverageModulationTable( pCoverageModulationTable_ )
{}
- VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV ) - offsetof( PipelineCoverageModulationStateCreateInfoNV, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PipelineCoverageModulationStateCreateInfoNV( PipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineCoverageModulationStateCreateInfoNV( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- PipelineCoverageModulationStateCreateInfoNV& operator=( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ PipelineCoverageModulationStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_, VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_, VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const float> const & coverageModulationTable_ )
+ : flags( flags_ ), coverageModulationMode( coverageModulationMode_ ), coverageModulationTableEnable( coverageModulationTableEnable_ ), coverageModulationTableCount( static_cast<uint32_t>( coverageModulationTable_.size() ) ), pCoverageModulationTable( coverageModulationTable_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PipelineCoverageModulationStateCreateInfoNV & operator=( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV const *>( &rhs );
+ return *this;
+ }
+
+ PipelineCoverageModulationStateCreateInfoNV & operator=( PipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineCoverageModulationStateCreateInfoNV ) );
return *this;
}
@@ -51838,6 +72293,16 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationTable( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const float> const & coverageModulationTable_ ) VULKAN_HPP_NOEXCEPT
+ {
+ coverageModulationTableCount = static_cast<uint32_t>( coverageModulationTable_.size() );
+ pCoverageModulationTable = coverageModulationTable_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
operator VkPipelineCoverageModulationStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineCoverageModulationStateCreateInfoNV*>( this );
@@ -51848,6 +72313,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPipelineCoverageModulationStateCreateInfoNV*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PipelineCoverageModulationStateCreateInfoNV const& ) const = default;
+#else
bool operator==( PipelineCoverageModulationStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -51863,6 +72332,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageModulationStateCreateInfoNV;
@@ -51872,32 +72344,44 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable = {};
uint32_t coverageModulationTableCount = {};
const float* pCoverageModulationTable = {};
+
};
static_assert( sizeof( PipelineCoverageModulationStateCreateInfoNV ) == sizeof( VkPipelineCoverageModulationStateCreateInfoNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineCoverageModulationStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePipelineCoverageModulationStateCreateInfoNV>
+ {
+ using Type = PipelineCoverageModulationStateCreateInfoNV;
+ };
+
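
A minimal sketch of the ArrayProxyNoTemporaries path added above for PipelineCoverageModulationStateCreateInfoNV, assuming enhanced mode is enabled (VULKAN_HPP_DISABLE_ENHANCED_MODE not defined); the helper name and table contents are illustrative:

    #include <vector>
    #include <vulkan/vulkan.hpp>

    // The caller owns the table; the returned struct only stores a pointer into it.
    vk::PipelineCoverageModulationStateCreateInfoNV
      makeCoverageModulationState( std::vector<float> const & table )
    {
      // setCoverageModulationTable() derives coverageModulationTableCount from the proxy
      // size and stores the data pointer, so count and pointer cannot drift apart.
      return vk::PipelineCoverageModulationStateCreateInfoNV{}
        .setCoverageModulationMode( vk::CoverageModulationModeNV::eRgba )
        .setCoverageModulationTableEnable( VK_TRUE )
        .setCoverageModulationTable( table );
    }
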
struct PipelineCoverageReductionStateCreateInfoNV
{
- VULKAN_HPP_CONSTEXPR PipelineCoverageReductionStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags_ = {},
- VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , coverageReductionMode( coverageReductionMode_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCoverageReductionStateCreateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PipelineCoverageReductionStateCreateInfoNV(VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags_ = {}, VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), coverageReductionMode( coverageReductionMode_ )
{}
- VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV ) - offsetof( PipelineCoverageReductionStateCreateInfoNV, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PipelineCoverageReductionStateCreateInfoNV( PipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineCoverageReductionStateCreateInfoNV( VkPipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PipelineCoverageReductionStateCreateInfoNV & operator=( VkPipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV const *>( &rhs );
+ return *this;
+ }
- PipelineCoverageReductionStateCreateInfoNV& operator=( VkPipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineCoverageReductionStateCreateInfoNV & operator=( PipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineCoverageReductionStateCreateInfoNV ) );
return *this;
}
@@ -51919,6 +72403,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPipelineCoverageReductionStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineCoverageReductionStateCreateInfoNV*>( this );
@@ -51929,6 +72414,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPipelineCoverageReductionStateCreateInfoNV*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PipelineCoverageReductionStateCreateInfoNV const& ) const = default;
+#else
bool operator==( PipelineCoverageReductionStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -51941,40 +72430,53 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageReductionStateCreateInfoNV;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags = {};
VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge;
+
};
static_assert( sizeof( PipelineCoverageReductionStateCreateInfoNV ) == sizeof( VkPipelineCoverageReductionStateCreateInfoNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineCoverageReductionStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePipelineCoverageReductionStateCreateInfoNV>
+ {
+ using Type = PipelineCoverageReductionStateCreateInfoNV;
+ };
+
struct PipelineCoverageToColorStateCreateInfoNV
{
- VULKAN_HPP_CONSTEXPR PipelineCoverageToColorStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable_ = {},
- uint32_t coverageToColorLocation_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , coverageToColorEnable( coverageToColorEnable_ )
- , coverageToColorLocation( coverageToColorLocation_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCoverageToColorStateCreateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PipelineCoverageToColorStateCreateInfoNV(VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags_ = {}, VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable_ = {}, uint32_t coverageToColorLocation_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), coverageToColorEnable( coverageToColorEnable_ ), coverageToColorLocation( coverageToColorLocation_ )
{}
- VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV ) - offsetof( PipelineCoverageToColorStateCreateInfoNV, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PipelineCoverageToColorStateCreateInfoNV( PipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineCoverageToColorStateCreateInfoNV( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PipelineCoverageToColorStateCreateInfoNV & operator=( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV const *>( &rhs );
+ return *this;
+ }
- PipelineCoverageToColorStateCreateInfoNV& operator=( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineCoverageToColorStateCreateInfoNV & operator=( PipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineCoverageToColorStateCreateInfoNV ) );
return *this;
}
@@ -52002,6 +72504,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPipelineCoverageToColorStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineCoverageToColorStateCreateInfoNV*>( this );
@@ -52012,6 +72515,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPipelineCoverageToColorStateCreateInfoNV*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PipelineCoverageToColorStateCreateInfoNV const& ) const = default;
+#else
bool operator==( PipelineCoverageToColorStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -52025,6 +72532,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageToColorStateCreateInfoNV;
@@ -52032,29 +72542,47 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags = {};
VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable = {};
uint32_t coverageToColorLocation = {};
+
};
static_assert( sizeof( PipelineCoverageToColorStateCreateInfoNV ) == sizeof( VkPipelineCoverageToColorStateCreateInfoNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineCoverageToColorStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePipelineCoverageToColorStateCreateInfoNV>
+ {
+ using Type = PipelineCoverageToColorStateCreateInfoNV;
+ };
+
struct PipelineCreationFeedbackEXT
{
- PipelineCreationFeedbackEXT( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlagsEXT flags_ = {},
- uint64_t duration_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , duration( duration_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackEXT(VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlagsEXT flags_ = {}, uint64_t duration_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), duration( duration_ )
{}
+ VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackEXT( PipelineCreationFeedbackEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
PipelineCreationFeedbackEXT( VkPipelineCreationFeedbackEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PipelineCreationFeedbackEXT & operator=( VkPipelineCreationFeedbackEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT const *>( &rhs );
+ return *this;
+ }
- PipelineCreationFeedbackEXT& operator=( VkPipelineCreationFeedbackEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineCreationFeedbackEXT & operator=( PipelineCreationFeedbackEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineCreationFeedbackEXT ) );
return *this;
}
+
operator VkPipelineCreationFeedbackEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineCreationFeedbackEXT*>( this );
@@ -52065,6 +72593,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPipelineCreationFeedbackEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PipelineCreationFeedbackEXT const& ) const = default;
+#else
bool operator==( PipelineCreationFeedbackEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( flags == rhs.flags )
@@ -52075,38 +72607,51 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlagsEXT flags = {};
uint64_t duration = {};
+
};
static_assert( sizeof( PipelineCreationFeedbackEXT ) == sizeof( VkPipelineCreationFeedbackEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineCreationFeedbackEXT>::value, "struct wrapper is not a standard layout!" );
struct PipelineCreationFeedbackCreateInfoEXT
{
- VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineCreationFeedback_ = {},
- uint32_t pipelineStageCreationFeedbackCount_ = {},
- VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineStageCreationFeedbacks_ = {} ) VULKAN_HPP_NOEXCEPT
- : pPipelineCreationFeedback( pPipelineCreationFeedback_ )
- , pipelineStageCreationFeedbackCount( pipelineStageCreationFeedbackCount_ )
- , pPipelineStageCreationFeedbacks( pPipelineStageCreationFeedbacks_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCreationFeedbackCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackCreateInfoEXT(VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineCreationFeedback_ = {}, uint32_t pipelineStageCreationFeedbackCount_ = {}, VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineStageCreationFeedbacks_ = {}) VULKAN_HPP_NOEXCEPT
+ : pPipelineCreationFeedback( pPipelineCreationFeedback_ ), pipelineStageCreationFeedbackCount( pipelineStageCreationFeedbackCount_ ), pPipelineStageCreationFeedbacks( pPipelineStageCreationFeedbacks_ )
{}
- VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfoEXT ) - offsetof( PipelineCreationFeedbackCreateInfoEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackCreateInfoEXT( PipelineCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineCreationFeedbackCreateInfoEXT( VkPipelineCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- PipelineCreationFeedbackCreateInfoEXT& operator=( VkPipelineCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ PipelineCreationFeedbackCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineCreationFeedback_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT> const & pipelineStageCreationFeedbacks_ )
+ : pPipelineCreationFeedback( pPipelineCreationFeedback_ ), pipelineStageCreationFeedbackCount( static_cast<uint32_t>( pipelineStageCreationFeedbacks_.size() ) ), pPipelineStageCreationFeedbacks( pipelineStageCreationFeedbacks_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PipelineCreationFeedbackCreateInfoEXT & operator=( VkPipelineCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+ PipelineCreationFeedbackCreateInfoEXT & operator=( PipelineCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfoEXT const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineCreationFeedbackCreateInfoEXT ) );
return *this;
}
@@ -52134,6 +72679,16 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ PipelineCreationFeedbackCreateInfoEXT & setPipelineStageCreationFeedbacks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT> const & pipelineStageCreationFeedbacks_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pipelineStageCreationFeedbackCount = static_cast<uint32_t>( pipelineStageCreationFeedbacks_.size() );
+ pPipelineStageCreationFeedbacks = pipelineStageCreationFeedbacks_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
operator VkPipelineCreationFeedbackCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineCreationFeedbackCreateInfoEXT*>( this );
@@ -52144,6 +72699,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPipelineCreationFeedbackCreateInfoEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PipelineCreationFeedbackCreateInfoEXT const& ) const = default;
+#else
bool operator==( PipelineCreationFeedbackCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -52157,6 +72716,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCreationFeedbackCreateInfoEXT;
@@ -52164,36 +72726,50 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineCreationFeedback = {};
uint32_t pipelineStageCreationFeedbackCount = {};
VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineStageCreationFeedbacks = {};
+
};
static_assert( sizeof( PipelineCreationFeedbackCreateInfoEXT ) == sizeof( VkPipelineCreationFeedbackCreateInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineCreationFeedbackCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePipelineCreationFeedbackCreateInfoEXT>
+ {
+ using Type = PipelineCreationFeedbackCreateInfoEXT;
+ };
+
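
Similarly, a minimal sketch of the new per-stage feedback setter added above for PipelineCreationFeedbackCreateInfoEXT (enhanced mode again; the helper and parameter names are illustrative). The resulting struct would typically be chained into a pipeline create-info via pNext, with the feedback storage kept alive by the caller until pipeline creation returns:

    #include <vector>
    #include <vulkan/vulkan.hpp>

    vk::PipelineCreationFeedbackCreateInfoEXT
      makeFeedbackInfo( vk::PipelineCreationFeedbackEXT &              wholePipelineFeedback,
                        std::vector<vk::PipelineCreationFeedbackEXT> & stageFeedbacks )
    {
      // The ArrayProxy setter fills pipelineStageCreationFeedbackCount from the vector size
      // and points pPipelineStageCreationFeedbacks at its storage.
      return vk::PipelineCreationFeedbackCreateInfoEXT{}
        .setPPipelineCreationFeedback( &wholePipelineFeedback )
        .setPipelineStageCreationFeedbacks( stageFeedbacks );
    }
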
struct PipelineDiscardRectangleStateCreateInfoEXT
{
- VULKAN_HPP_CONSTEXPR PipelineDiscardRectangleStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_ = {},
- VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_ = VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eInclusive,
- uint32_t discardRectangleCount_ = {},
- const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , discardRectangleMode( discardRectangleMode_ )
- , discardRectangleCount( discardRectangleCount_ )
- , pDiscardRectangles( pDiscardRectangles_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineDiscardRectangleStateCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PipelineDiscardRectangleStateCreateInfoEXT(VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_ = VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eInclusive, uint32_t discardRectangleCount_ = {}, const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), discardRectangleMode( discardRectangleMode_ ), discardRectangleCount( discardRectangleCount_ ), pDiscardRectangles( pDiscardRectangles_ )
{}
- VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT ) - offsetof( PipelineDiscardRectangleStateCreateInfoEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PipelineDiscardRectangleStateCreateInfoEXT( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineDiscardRectangleStateCreateInfoEXT( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- PipelineDiscardRectangleStateCreateInfoEXT& operator=( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ PipelineDiscardRectangleStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_, VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles_ )
+ : flags( flags_ ), discardRectangleMode( discardRectangleMode_ ), discardRectangleCount( static_cast<uint32_t>( discardRectangles_.size() ) ), pDiscardRectangles( discardRectangles_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PipelineDiscardRectangleStateCreateInfoEXT & operator=( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+ PipelineDiscardRectangleStateCreateInfoEXT & operator=( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineDiscardRectangleStateCreateInfoEXT ) );
return *this;
}
@@ -52227,6 +72803,16 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ PipelineDiscardRectangleStateCreateInfoEXT & setDiscardRectangles( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles_ ) VULKAN_HPP_NOEXCEPT
+ {
+ discardRectangleCount = static_cast<uint32_t>( discardRectangles_.size() );
+ pDiscardRectangles = discardRectangles_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
operator VkPipelineDiscardRectangleStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineDiscardRectangleStateCreateInfoEXT*>( this );
@@ -52237,6 +72823,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPipelineDiscardRectangleStateCreateInfoEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PipelineDiscardRectangleStateCreateInfoEXT const& ) const = default;
+#else
bool operator==( PipelineDiscardRectangleStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -52251,6 +72841,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDiscardRectangleStateCreateInfoEXT;
@@ -52259,572 +72852,44 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode = VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eInclusive;
uint32_t discardRectangleCount = {};
const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles = {};
+
};
static_assert( sizeof( PipelineDiscardRectangleStateCreateInfoEXT ) == sizeof( VkPipelineDiscardRectangleStateCreateInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineDiscardRectangleStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
- struct PipelineExecutableInfoKHR
- {
- VULKAN_HPP_CONSTEXPR PipelineExecutableInfoKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {},
- uint32_t executableIndex_ = {} ) VULKAN_HPP_NOEXCEPT
- : pipeline( pipeline_ )
- , executableIndex( executableIndex_ )
- {}
-
- VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & operator=( VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR ) - offsetof( PipelineExecutableInfoKHR, pNext ) );
- return *this;
- }
-
- PipelineExecutableInfoKHR( VkPipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
-
- PipelineExecutableInfoKHR& operator=( VkPipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR const *>(&rhs);
- return *this;
- }
-
- PipelineExecutableInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
- PipelineExecutableInfoKHR & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
- {
- pipeline = pipeline_;
- return *this;
- }
-
- PipelineExecutableInfoKHR & setExecutableIndex( uint32_t executableIndex_ ) VULKAN_HPP_NOEXCEPT
- {
- executableIndex = executableIndex_;
- return *this;
- }
-
- operator VkPipelineExecutableInfoKHR const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkPipelineExecutableInfoKHR*>( this );
- }
-
- operator VkPipelineExecutableInfoKHR &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkPipelineExecutableInfoKHR*>( this );
- }
-
- bool operator==( PipelineExecutableInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( pipeline == rhs.pipeline )
- && ( executableIndex == rhs.executableIndex );
- }
-
- bool operator!=( PipelineExecutableInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-
- public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableInfoKHR;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
- uint32_t executableIndex = {};
- };
- static_assert( sizeof( PipelineExecutableInfoKHR ) == sizeof( VkPipelineExecutableInfoKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PipelineExecutableInfoKHR>::value, "struct wrapper is not a standard layout!" );
-
- struct PipelineExecutableInternalRepresentationKHR
- {
- PipelineExecutableInternalRepresentationKHR( std::array<char,VK_MAX_DESCRIPTION_SIZE> const& name_ = {},
- std::array<char,VK_MAX_DESCRIPTION_SIZE> const& description_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 isText_ = {},
- size_t dataSize_ = {},
- void* pData_ = {} ) VULKAN_HPP_NOEXCEPT
- : name{}
- , description{}
- , isText( isText_ )
- , dataSize( dataSize_ )
- , pData( pData_ )
- {
- VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_DESCRIPTION_SIZE,VK_MAX_DESCRIPTION_SIZE>::copy( name, name_ );
- VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_DESCRIPTION_SIZE,VK_MAX_DESCRIPTION_SIZE>::copy( description, description_ );
- }
-
- VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR & operator=( VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR ) - offsetof( PipelineExecutableInternalRepresentationKHR, pNext ) );
- return *this;
- }
-
- PipelineExecutableInternalRepresentationKHR( VkPipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
-
- PipelineExecutableInternalRepresentationKHR& operator=( VkPipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR const *>(&rhs);
- return *this;
- }
-
- operator VkPipelineExecutableInternalRepresentationKHR const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkPipelineExecutableInternalRepresentationKHR*>( this );
- }
-
- operator VkPipelineExecutableInternalRepresentationKHR &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR*>( this );
- }
-
- bool operator==( PipelineExecutableInternalRepresentationKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( memcmp( name, rhs.name, VK_MAX_DESCRIPTION_SIZE * sizeof( char ) ) == 0 )
- && ( memcmp( description, rhs.description, VK_MAX_DESCRIPTION_SIZE * sizeof( char ) ) == 0 )
- && ( isText == rhs.isText )
- && ( dataSize == rhs.dataSize )
- && ( pData == rhs.pData );
- }
-
- bool operator!=( PipelineExecutableInternalRepresentationKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-
- public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableInternalRepresentationKHR;
- void* pNext = {};
- char name[VK_MAX_DESCRIPTION_SIZE] = {};
- char description[VK_MAX_DESCRIPTION_SIZE] = {};
- VULKAN_HPP_NAMESPACE::Bool32 isText = {};
- size_t dataSize = {};
- void* pData = {};
- };
- static_assert( sizeof( PipelineExecutableInternalRepresentationKHR ) == sizeof( VkPipelineExecutableInternalRepresentationKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PipelineExecutableInternalRepresentationKHR>::value, "struct wrapper is not a standard layout!" );
-
- struct PipelineExecutablePropertiesKHR
- {
- PipelineExecutablePropertiesKHR( VULKAN_HPP_NAMESPACE::ShaderStageFlags stages_ = {},
- std::array<char,VK_MAX_DESCRIPTION_SIZE> const& name_ = {},
- std::array<char,VK_MAX_DESCRIPTION_SIZE> const& description_ = {},
- uint32_t subgroupSize_ = {} ) VULKAN_HPP_NOEXCEPT
- : stages( stages_ )
- , name{}
- , description{}
- , subgroupSize( subgroupSize_ )
- {
- VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_DESCRIPTION_SIZE,VK_MAX_DESCRIPTION_SIZE>::copy( name, name_ );
- VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_DESCRIPTION_SIZE,VK_MAX_DESCRIPTION_SIZE>::copy( description, description_ );
- }
-
- VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR & operator=( VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR ) - offsetof( PipelineExecutablePropertiesKHR, pNext ) );
- return *this;
- }
-
- PipelineExecutablePropertiesKHR( VkPipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
-
- PipelineExecutablePropertiesKHR& operator=( VkPipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR const *>(&rhs);
- return *this;
- }
-
- operator VkPipelineExecutablePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkPipelineExecutablePropertiesKHR*>( this );
- }
-
- operator VkPipelineExecutablePropertiesKHR &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkPipelineExecutablePropertiesKHR*>( this );
- }
-
- bool operator==( PipelineExecutablePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( stages == rhs.stages )
- && ( memcmp( name, rhs.name, VK_MAX_DESCRIPTION_SIZE * sizeof( char ) ) == 0 )
- && ( memcmp( description, rhs.description, VK_MAX_DESCRIPTION_SIZE * sizeof( char ) ) == 0 )
- && ( subgroupSize == rhs.subgroupSize );
- }
-
- bool operator!=( PipelineExecutablePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-
- public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutablePropertiesKHR;
- void* pNext = {};
- VULKAN_HPP_NAMESPACE::ShaderStageFlags stages = {};
- char name[VK_MAX_DESCRIPTION_SIZE] = {};
- char description[VK_MAX_DESCRIPTION_SIZE] = {};
- uint32_t subgroupSize = {};
- };
- static_assert( sizeof( PipelineExecutablePropertiesKHR ) == sizeof( VkPipelineExecutablePropertiesKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PipelineExecutablePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
-
- union PipelineExecutableStatisticValueKHR
- {
- VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR & operator=( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( this, &rhs, sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR ) );
- return *this;
- }
-
- operator VkPipelineExecutableStatisticValueKHR const&() const
- {
- return *reinterpret_cast<const VkPipelineExecutableStatisticValueKHR*>(this);
- }
-
- operator VkPipelineExecutableStatisticValueKHR &()
- {
- return *reinterpret_cast<VkPipelineExecutableStatisticValueKHR*>(this);
- }
-
-#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
- VULKAN_HPP_NAMESPACE::Bool32 b32;
- int64_t i64;
- uint64_t u64;
- double f64;
-#else
- VkBool32 b32;
- int64_t i64;
- uint64_t u64;
- double f64;
-#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
- };
-
- struct PipelineExecutableStatisticKHR
+ template <>
+ struct CppType<StructureType, StructureType::ePipelineDiscardRectangleStateCreateInfoEXT>
{
- PipelineExecutableStatisticKHR( std::array<char,VK_MAX_DESCRIPTION_SIZE> const& name_ = {},
- std::array<char,VK_MAX_DESCRIPTION_SIZE> const& description_ = {},
- VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format_ = VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32,
- VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value_ = {} ) VULKAN_HPP_NOEXCEPT
- : name{}
- , description{}
- , format( format_ )
- , value( value_ )
- {
- VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_DESCRIPTION_SIZE,VK_MAX_DESCRIPTION_SIZE>::copy( name, name_ );
- VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_DESCRIPTION_SIZE,VK_MAX_DESCRIPTION_SIZE>::copy( description, description_ );
- }
-
- VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR & operator=( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR ) - offsetof( PipelineExecutableStatisticKHR, pNext ) );
- return *this;
- }
-
- PipelineExecutableStatisticKHR( VkPipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
-
- PipelineExecutableStatisticKHR& operator=( VkPipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR const *>(&rhs);
- return *this;
- }
-
- operator VkPipelineExecutableStatisticKHR const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkPipelineExecutableStatisticKHR*>( this );
- }
-
- operator VkPipelineExecutableStatisticKHR &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkPipelineExecutableStatisticKHR*>( this );
- }
-
- public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableStatisticKHR;
- void* pNext = {};
- char name[VK_MAX_DESCRIPTION_SIZE] = {};
- char description[VK_MAX_DESCRIPTION_SIZE] = {};
- VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format = VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32;
- VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value = {};
+ using Type = PipelineDiscardRectangleStateCreateInfoEXT;
};
- static_assert( sizeof( PipelineExecutableStatisticKHR ) == sizeof( VkPipelineExecutableStatisticKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PipelineExecutableStatisticKHR>::value, "struct wrapper is not a standard layout!" );
-
- struct PipelineInfoKHR
- {
- VULKAN_HPP_CONSTEXPR PipelineInfoKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {} ) VULKAN_HPP_NOEXCEPT
- : pipeline( pipeline_ )
- {}
-
- VULKAN_HPP_NAMESPACE::PipelineInfoKHR & operator=( VULKAN_HPP_NAMESPACE::PipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineInfoKHR ) - offsetof( PipelineInfoKHR, pNext ) );
- return *this;
- }
- PipelineInfoKHR( VkPipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
-
- PipelineInfoKHR& operator=( VkPipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineInfoKHR const *>(&rhs);
- return *this;
- }
-
- PipelineInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
- PipelineInfoKHR & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
- {
- pipeline = pipeline_;
- return *this;
- }
-
- operator VkPipelineInfoKHR const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkPipelineInfoKHR*>( this );
- }
-
- operator VkPipelineInfoKHR &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkPipelineInfoKHR*>( this );
- }
-
- bool operator==( PipelineInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( pipeline == rhs.pipeline );
- }
-
- bool operator!=( PipelineInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-
- public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineInfoKHR;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
- };
- static_assert( sizeof( PipelineInfoKHR ) == sizeof( VkPipelineInfoKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PipelineInfoKHR>::value, "struct wrapper is not a standard layout!" );
-
- struct PushConstantRange
+ struct PipelineRasterizationConservativeStateCreateInfoEXT
{
- VULKAN_HPP_CONSTEXPR PushConstantRange( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {},
- uint32_t offset_ = {},
- uint32_t size_ = {} ) VULKAN_HPP_NOEXCEPT
- : stageFlags( stageFlags_ )
- , offset( offset_ )
- , size( size_ )
- {}
-
- PushConstantRange( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
-
- PushConstantRange& operator=( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PushConstantRange const *>(&rhs);
- return *this;
- }
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT;
- PushConstantRange & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
- {
- stageFlags = stageFlags_;
- return *this;
- }
-
- PushConstantRange & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
- {
- offset = offset_;
- return *this;
- }
-
- PushConstantRange & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT
- {
- size = size_;
- return *this;
- }
-
- operator VkPushConstantRange const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkPushConstantRange*>( this );
- }
-
- operator VkPushConstantRange &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkPushConstantRange*>( this );
- }
-
- bool operator==( PushConstantRange const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( stageFlags == rhs.stageFlags )
- && ( offset == rhs.offset )
- && ( size == rhs.size );
- }
-
- bool operator!=( PushConstantRange const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-
- public:
- VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {};
- uint32_t offset = {};
- uint32_t size = {};
- };
- static_assert( sizeof( PushConstantRange ) == sizeof( VkPushConstantRange ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PushConstantRange>::value, "struct wrapper is not a standard layout!" );
-
- struct PipelineLayoutCreateInfo
- {
- VULKAN_HPP_CONSTEXPR PipelineLayoutCreateInfo( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_ = {},
- uint32_t setLayoutCount_ = {},
- const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts_ = {},
- uint32_t pushConstantRangeCount_ = {},
- const VULKAN_HPP_NAMESPACE::PushConstantRange* pPushConstantRanges_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , setLayoutCount( setLayoutCount_ )
- , pSetLayouts( pSetLayouts_ )
- , pushConstantRangeCount( pushConstantRangeCount_ )
- , pPushConstantRanges( pPushConstantRanges_ )
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PipelineRasterizationConservativeStateCreateInfoEXT(VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode_ = VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled, float extraPrimitiveOverestimationSize_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), conservativeRasterizationMode( conservativeRasterizationMode_ ), extraPrimitiveOverestimationSize( extraPrimitiveOverestimationSize_ )
{}
- VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo ) - offsetof( PipelineLayoutCreateInfo, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PipelineRasterizationConservativeStateCreateInfoEXT( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineLayoutCreateInfo( VkPipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineRasterizationConservativeStateCreateInfoEXT( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PipelineLayoutCreateInfo& operator=( VkPipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const *>(&rhs);
- return *this;
- }
-
- PipelineLayoutCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
- PipelineLayoutCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
- {
- flags = flags_;
- return *this;
- }
-
- PipelineLayoutCreateInfo & setSetLayoutCount( uint32_t setLayoutCount_ ) VULKAN_HPP_NOEXCEPT
- {
- setLayoutCount = setLayoutCount_;
- return *this;
- }
-
- PipelineLayoutCreateInfo & setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts_ ) VULKAN_HPP_NOEXCEPT
+ PipelineRasterizationConservativeStateCreateInfoEXT & operator=( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- pSetLayouts = pSetLayouts_;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT const *>( &rhs );
return *this;
}
- PipelineLayoutCreateInfo & setPushConstantRangeCount( uint32_t pushConstantRangeCount_ ) VULKAN_HPP_NOEXCEPT
+ PipelineRasterizationConservativeStateCreateInfoEXT & operator=( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- pushConstantRangeCount = pushConstantRangeCount_;
- return *this;
- }
-
- PipelineLayoutCreateInfo & setPPushConstantRanges( const VULKAN_HPP_NAMESPACE::PushConstantRange* pPushConstantRanges_ ) VULKAN_HPP_NOEXCEPT
- {
- pPushConstantRanges = pPushConstantRanges_;
- return *this;
- }
-
- operator VkPipelineLayoutCreateInfo const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkPipelineLayoutCreateInfo*>( this );
- }
-
- operator VkPipelineLayoutCreateInfo &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkPipelineLayoutCreateInfo*>( this );
- }
-
- bool operator==( PipelineLayoutCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( flags == rhs.flags )
- && ( setLayoutCount == rhs.setLayoutCount )
- && ( pSetLayouts == rhs.pSetLayouts )
- && ( pushConstantRangeCount == rhs.pushConstantRangeCount )
- && ( pPushConstantRanges == rhs.pPushConstantRanges );
- }
-
- bool operator!=( PipelineLayoutCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-
- public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineLayoutCreateInfo;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags = {};
- uint32_t setLayoutCount = {};
- const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts = {};
- uint32_t pushConstantRangeCount = {};
- const VULKAN_HPP_NAMESPACE::PushConstantRange* pPushConstantRanges = {};
- };
- static_assert( sizeof( PipelineLayoutCreateInfo ) == sizeof( VkPipelineLayoutCreateInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PipelineLayoutCreateInfo>::value, "struct wrapper is not a standard layout!" );
-
- struct PipelineRasterizationConservativeStateCreateInfoEXT
- {
- VULKAN_HPP_CONSTEXPR PipelineRasterizationConservativeStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ = {},
- VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode_ = VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled,
- float extraPrimitiveOverestimationSize_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , conservativeRasterizationMode( conservativeRasterizationMode_ )
- , extraPrimitiveOverestimationSize( extraPrimitiveOverestimationSize_ )
- {}
-
- VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT ) - offsetof( PipelineRasterizationConservativeStateCreateInfoEXT, pNext ) );
- return *this;
- }
-
- PipelineRasterizationConservativeStateCreateInfoEXT( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
-
- PipelineRasterizationConservativeStateCreateInfoEXT& operator=( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineRasterizationConservativeStateCreateInfoEXT ) );
return *this;
}
@@ -52852,6 +72917,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPipelineRasterizationConservativeStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineRasterizationConservativeStateCreateInfoEXT*>( this );
@@ -52862,6 +72928,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPipelineRasterizationConservativeStateCreateInfoEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PipelineRasterizationConservativeStateCreateInfoEXT const& ) const = default;
+#else
bool operator==( PipelineRasterizationConservativeStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -52875,6 +72945,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT;
@@ -52882,32 +72955,44 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags = {};
VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode = VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled;
float extraPrimitiveOverestimationSize = {};
+
};
static_assert( sizeof( PipelineRasterizationConservativeStateCreateInfoEXT ) == sizeof( VkPipelineRasterizationConservativeStateCreateInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineRasterizationConservativeStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT>
+ {
+ using Type = PipelineRasterizationConservativeStateCreateInfoEXT;
+ };
+
struct PipelineRasterizationDepthClipStateCreateInfoEXT
{
- VULKAN_HPP_CONSTEXPR PipelineRasterizationDepthClipStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , depthClipEnable( depthClipEnable_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PipelineRasterizationDepthClipStateCreateInfoEXT(VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), depthClipEnable( depthClipEnable_ )
{}
- VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT ) - offsetof( PipelineRasterizationDepthClipStateCreateInfoEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PipelineRasterizationDepthClipStateCreateInfoEXT( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineRasterizationDepthClipStateCreateInfoEXT( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PipelineRasterizationDepthClipStateCreateInfoEXT & operator=( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT const *>( &rhs );
+ return *this;
+ }
- PipelineRasterizationDepthClipStateCreateInfoEXT& operator=( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineRasterizationDepthClipStateCreateInfoEXT & operator=( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineRasterizationDepthClipStateCreateInfoEXT ) );
return *this;
}
@@ -52929,6 +73014,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPipelineRasterizationDepthClipStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineRasterizationDepthClipStateCreateInfoEXT*>( this );
@@ -52939,6 +73025,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPipelineRasterizationDepthClipStateCreateInfoEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PipelineRasterizationDepthClipStateCreateInfoEXT const& ) const = default;
+#else
bool operator==( PipelineRasterizationDepthClipStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -52951,42 +73041,53 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags = {};
VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable = {};
+
};
static_assert( sizeof( PipelineRasterizationDepthClipStateCreateInfoEXT ) == sizeof( VkPipelineRasterizationDepthClipStateCreateInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineRasterizationDepthClipStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT>
+ {
+ using Type = PipelineRasterizationDepthClipStateCreateInfoEXT;
+ };
+
struct PipelineRasterizationLineStateCreateInfoEXT
{
- VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode_ = VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT::eDefault,
- VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable_ = {},
- uint32_t lineStippleFactor_ = {},
- uint16_t lineStipplePattern_ = {} ) VULKAN_HPP_NOEXCEPT
- : lineRasterizationMode( lineRasterizationMode_ )
- , stippledLineEnable( stippledLineEnable_ )
- , lineStippleFactor( lineStippleFactor_ )
- , lineStipplePattern( lineStipplePattern_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationLineStateCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfoEXT(VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode_ = VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT::eDefault, VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable_ = {}, uint32_t lineStippleFactor_ = {}, uint16_t lineStipplePattern_ = {}) VULKAN_HPP_NOEXCEPT
+ : lineRasterizationMode( lineRasterizationMode_ ), stippledLineEnable( stippledLineEnable_ ), lineStippleFactor( lineStippleFactor_ ), lineStipplePattern( lineStipplePattern_ )
{}
- VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT ) - offsetof( PipelineRasterizationLineStateCreateInfoEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfoEXT( PipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineRasterizationLineStateCreateInfoEXT( VkPipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PipelineRasterizationLineStateCreateInfoEXT& operator=( VkPipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineRasterizationLineStateCreateInfoEXT & operator=( VkPipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+ PipelineRasterizationLineStateCreateInfoEXT & operator=( PipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineRasterizationLineStateCreateInfoEXT ) );
return *this;
}
@@ -53020,6 +73121,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPipelineRasterizationLineStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineRasterizationLineStateCreateInfoEXT*>( this );
@@ -53030,6 +73132,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPipelineRasterizationLineStateCreateInfoEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PipelineRasterizationLineStateCreateInfoEXT const& ) const = default;
+#else
bool operator==( PipelineRasterizationLineStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -53044,6 +73150,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationLineStateCreateInfoEXT;
@@ -53052,30 +73161,44 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable = {};
uint32_t lineStippleFactor = {};
uint16_t lineStipplePattern = {};
+
};
static_assert( sizeof( PipelineRasterizationLineStateCreateInfoEXT ) == sizeof( VkPipelineRasterizationLineStateCreateInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineRasterizationLineStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePipelineRasterizationLineStateCreateInfoEXT>
+ {
+ using Type = PipelineRasterizationLineStateCreateInfoEXT;
+ };
+
struct PipelineRasterizationStateRasterizationOrderAMD
{
- VULKAN_HPP_CONSTEXPR PipelineRasterizationStateRasterizationOrderAMD( VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder_ = VULKAN_HPP_NAMESPACE::RasterizationOrderAMD::eStrict ) VULKAN_HPP_NOEXCEPT
- : rasterizationOrder( rasterizationOrder_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationStateRasterizationOrderAMD;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PipelineRasterizationStateRasterizationOrderAMD(VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder_ = VULKAN_HPP_NAMESPACE::RasterizationOrderAMD::eStrict) VULKAN_HPP_NOEXCEPT
+ : rasterizationOrder( rasterizationOrder_ )
{}
- VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD & operator=( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD ) - offsetof( PipelineRasterizationStateRasterizationOrderAMD, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PipelineRasterizationStateRasterizationOrderAMD( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineRasterizationStateRasterizationOrderAMD( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PipelineRasterizationStateRasterizationOrderAMD& operator=( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineRasterizationStateRasterizationOrderAMD & operator=( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD const *>( &rhs );
+ return *this;
+ }
+
+ PipelineRasterizationStateRasterizationOrderAMD & operator=( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineRasterizationStateRasterizationOrderAMD ) );
return *this;
}
@@ -53091,6 +73214,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPipelineRasterizationStateRasterizationOrderAMD const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineRasterizationStateRasterizationOrderAMD*>( this );
@@ -53101,6 +73225,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPipelineRasterizationStateRasterizationOrderAMD*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PipelineRasterizationStateRasterizationOrderAMD const& ) const = default;
+#else
bool operator==( PipelineRasterizationStateRasterizationOrderAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -53112,37 +73240,52 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateRasterizationOrderAMD;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder = VULKAN_HPP_NAMESPACE::RasterizationOrderAMD::eStrict;
+
};
static_assert( sizeof( PipelineRasterizationStateRasterizationOrderAMD ) == sizeof( VkPipelineRasterizationStateRasterizationOrderAMD ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineRasterizationStateRasterizationOrderAMD>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePipelineRasterizationStateRasterizationOrderAMD>
+ {
+ using Type = PipelineRasterizationStateRasterizationOrderAMD;
+ };
+
struct PipelineRasterizationStateStreamCreateInfoEXT
{
- VULKAN_HPP_CONSTEXPR PipelineRasterizationStateStreamCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags_ = {},
- uint32_t rasterizationStream_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , rasterizationStream( rasterizationStream_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationStateStreamCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PipelineRasterizationStateStreamCreateInfoEXT(VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags_ = {}, uint32_t rasterizationStream_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), rasterizationStream( rasterizationStream_ )
{}
- VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT ) - offsetof( PipelineRasterizationStateStreamCreateInfoEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PipelineRasterizationStateStreamCreateInfoEXT( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineRasterizationStateStreamCreateInfoEXT( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PipelineRasterizationStateStreamCreateInfoEXT & operator=( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT const *>( &rhs );
+ return *this;
+ }
- PipelineRasterizationStateStreamCreateInfoEXT& operator=( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineRasterizationStateStreamCreateInfoEXT & operator=( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineRasterizationStateStreamCreateInfoEXT ) );
return *this;
}
@@ -53164,6 +73307,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPipelineRasterizationStateStreamCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineRasterizationStateStreamCreateInfoEXT*>( this );
@@ -53174,6 +73318,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPipelineRasterizationStateStreamCreateInfoEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PipelineRasterizationStateStreamCreateInfoEXT const& ) const = default;
+#else
bool operator==( PipelineRasterizationStateStreamCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -53186,36 +73334,53 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateStreamCreateInfoEXT;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags = {};
uint32_t rasterizationStream = {};
+
};
static_assert( sizeof( PipelineRasterizationStateStreamCreateInfoEXT ) == sizeof( VkPipelineRasterizationStateStreamCreateInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineRasterizationStateStreamCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePipelineRasterizationStateStreamCreateInfoEXT>
+ {
+ using Type = PipelineRasterizationStateStreamCreateInfoEXT;
+ };
+
struct PipelineRepresentativeFragmentTestStateCreateInfoNV
{
- VULKAN_HPP_CONSTEXPR PipelineRepresentativeFragmentTestStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable_ = {} ) VULKAN_HPP_NOEXCEPT
- : representativeFragmentTestEnable( representativeFragmentTestEnable_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PipelineRepresentativeFragmentTestStateCreateInfoNV(VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable_ = {}) VULKAN_HPP_NOEXCEPT
+ : representativeFragmentTestEnable( representativeFragmentTestEnable_ )
{}
- VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV ) - offsetof( PipelineRepresentativeFragmentTestStateCreateInfoNV, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PipelineRepresentativeFragmentTestStateCreateInfoNV( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineRepresentativeFragmentTestStateCreateInfoNV( VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PipelineRepresentativeFragmentTestStateCreateInfoNV& operator=( VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineRepresentativeFragmentTestStateCreateInfoNV & operator=( VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV const *>( &rhs );
+ return *this;
+ }
+
+ PipelineRepresentativeFragmentTestStateCreateInfoNV & operator=( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineRepresentativeFragmentTestStateCreateInfoNV ) );
return *this;
}
@@ -53231,6 +73396,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineRepresentativeFragmentTestStateCreateInfoNV*>( this );
@@ -53241,6 +73407,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPipelineRepresentativeFragmentTestStateCreateInfoNV*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PipelineRepresentativeFragmentTestStateCreateInfoNV const& ) const = default;
+#else
bool operator==( PipelineRepresentativeFragmentTestStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -53252,37 +73422,52 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable = {};
+
};
static_assert( sizeof( PipelineRepresentativeFragmentTestStateCreateInfoNV ) == sizeof( VkPipelineRepresentativeFragmentTestStateCreateInfoNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineRepresentativeFragmentTestStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV>
+ {
+ using Type = PipelineRepresentativeFragmentTestStateCreateInfoNV;
+ };
+
struct PipelineSampleLocationsStateCreateInfoEXT
{
- VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable_ = {},
- VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {} ) VULKAN_HPP_NOEXCEPT
- : sampleLocationsEnable( sampleLocationsEnable_ )
- , sampleLocationsInfo( sampleLocationsInfo_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineSampleLocationsStateCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT(VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable_ = {}, VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {}) VULKAN_HPP_NOEXCEPT
+ : sampleLocationsEnable( sampleLocationsEnable_ ), sampleLocationsInfo( sampleLocationsInfo_ )
{}
- VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT ) - offsetof( PipelineSampleLocationsStateCreateInfoEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineSampleLocationsStateCreateInfoEXT( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PipelineSampleLocationsStateCreateInfoEXT & operator=( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT const *>( &rhs );
+ return *this;
+ }
- PipelineSampleLocationsStateCreateInfoEXT& operator=( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineSampleLocationsStateCreateInfoEXT & operator=( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineSampleLocationsStateCreateInfoEXT ) );
return *this;
}
@@ -53298,12 +73483,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineSampleLocationsStateCreateInfoEXT & setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
+ PipelineSampleLocationsStateCreateInfoEXT & setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
{
sampleLocationsInfo = sampleLocationsInfo_;
return *this;
}
+
operator VkPipelineSampleLocationsStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineSampleLocationsStateCreateInfoEXT*>( this );
@@ -53314,6 +73500,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPipelineSampleLocationsStateCreateInfoEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PipelineSampleLocationsStateCreateInfoEXT const& ) const = default;
+#else
bool operator==( PipelineSampleLocationsStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -53326,39 +73516,57 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineSampleLocationsStateCreateInfoEXT;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable = {};
VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {};
+
};
static_assert( sizeof( PipelineSampleLocationsStateCreateInfoEXT ) == sizeof( VkPipelineSampleLocationsStateCreateInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineSampleLocationsStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePipelineSampleLocationsStateCreateInfoEXT>
+ {
+ using Type = PipelineSampleLocationsStateCreateInfoEXT;
+ };
+
struct PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT
{
- PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT( uint32_t requiredSubgroupSize_ = {} ) VULKAN_HPP_NOEXCEPT
- : requiredSubgroupSize( requiredSubgroupSize_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT(uint32_t requiredSubgroupSize_ = {}) VULKAN_HPP_NOEXCEPT
+ : requiredSubgroupSize( requiredSubgroupSize_ )
{}
- VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT ) - offsetof( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT( VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT & operator=( VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const *>( &rhs );
+ return *this;
+ }
- PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT& operator=( VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT & operator=( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT ) );
return *this;
}
+
operator VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT*>( this );
@@ -53369,6 +73577,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const& ) const = default;
+#else
bool operator==( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -53380,35 +73592,52 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT;
void* pNext = {};
uint32_t requiredSubgroupSize = {};
+
};
static_assert( sizeof( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT ) == sizeof( VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT>
+ {
+ using Type = PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT;
+ };
+
struct PipelineTessellationDomainOriginStateCreateInfo
{
- VULKAN_HPP_CONSTEXPR PipelineTessellationDomainOriginStateCreateInfo( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin_ = VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft ) VULKAN_HPP_NOEXCEPT
- : domainOrigin( domainOrigin_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineTessellationDomainOriginStateCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PipelineTessellationDomainOriginStateCreateInfo(VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin_ = VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft) VULKAN_HPP_NOEXCEPT
+ : domainOrigin( domainOrigin_ )
{}
- VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo ) - offsetof( PipelineTessellationDomainOriginStateCreateInfo, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PipelineTessellationDomainOriginStateCreateInfo( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineTessellationDomainOriginStateCreateInfo( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PipelineTessellationDomainOriginStateCreateInfo & operator=( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo const *>( &rhs );
+ return *this;
+ }
- PipelineTessellationDomainOriginStateCreateInfo& operator=( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineTessellationDomainOriginStateCreateInfo & operator=( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineTessellationDomainOriginStateCreateInfo ) );
return *this;
}
@@ -53424,6 +73653,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPipelineTessellationDomainOriginStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineTessellationDomainOriginStateCreateInfo*>( this );
@@ -53434,6 +73664,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPipelineTessellationDomainOriginStateCreateInfo*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PipelineTessellationDomainOriginStateCreateInfo const& ) const = default;
+#else
bool operator==( PipelineTessellationDomainOriginStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -53445,31 +73679,52 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineTessellationDomainOriginStateCreateInfo;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin = VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft;
+
};
static_assert( sizeof( PipelineTessellationDomainOriginStateCreateInfo ) == sizeof( VkPipelineTessellationDomainOriginStateCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineTessellationDomainOriginStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePipelineTessellationDomainOriginStateCreateInfo>
+ {
+ using Type = PipelineTessellationDomainOriginStateCreateInfo;
+ };
+ using PipelineTessellationDomainOriginStateCreateInfoKHR = PipelineTessellationDomainOriginStateCreateInfo;
+
struct VertexInputBindingDivisorDescriptionEXT
{
- VULKAN_HPP_CONSTEXPR VertexInputBindingDivisorDescriptionEXT( uint32_t binding_ = {},
- uint32_t divisor_ = {} ) VULKAN_HPP_NOEXCEPT
- : binding( binding_ )
- , divisor( divisor_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR VertexInputBindingDivisorDescriptionEXT(uint32_t binding_ = {}, uint32_t divisor_ = {}) VULKAN_HPP_NOEXCEPT
+ : binding( binding_ ), divisor( divisor_ )
{}
+ VULKAN_HPP_CONSTEXPR VertexInputBindingDivisorDescriptionEXT( VertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
VertexInputBindingDivisorDescriptionEXT( VkVertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ VertexInputBindingDivisorDescriptionEXT & operator=( VkVertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT const *>( &rhs );
+ return *this;
+ }
- VertexInputBindingDivisorDescriptionEXT& operator=( VkVertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ VertexInputBindingDivisorDescriptionEXT & operator=( VertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( VertexInputBindingDivisorDescriptionEXT ) );
return *this;
}
@@ -53485,6 +73740,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkVertexInputBindingDivisorDescriptionEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVertexInputBindingDivisorDescriptionEXT*>( this );
@@ -53495,6 +73751,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkVertexInputBindingDivisorDescriptionEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( VertexInputBindingDivisorDescriptionEXT const& ) const = default;
+#else
bool operator==( VertexInputBindingDivisorDescriptionEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( binding == rhs.binding )
@@ -53505,36 +73765,51 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
uint32_t binding = {};
uint32_t divisor = {};
+
};
static_assert( sizeof( VertexInputBindingDivisorDescriptionEXT ) == sizeof( VkVertexInputBindingDivisorDescriptionEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<VertexInputBindingDivisorDescriptionEXT>::value, "struct wrapper is not a standard layout!" );
struct PipelineVertexInputDivisorStateCreateInfoEXT
{
- VULKAN_HPP_CONSTEXPR PipelineVertexInputDivisorStateCreateInfoEXT( uint32_t vertexBindingDivisorCount_ = {},
- const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors_ = {} ) VULKAN_HPP_NOEXCEPT
- : vertexBindingDivisorCount( vertexBindingDivisorCount_ )
- , pVertexBindingDivisors( pVertexBindingDivisors_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PipelineVertexInputDivisorStateCreateInfoEXT(uint32_t vertexBindingDivisorCount_ = {}, const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors_ = {}) VULKAN_HPP_NOEXCEPT
+ : vertexBindingDivisorCount( vertexBindingDivisorCount_ ), pVertexBindingDivisors( pVertexBindingDivisors_ )
{}
- VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT ) - offsetof( PipelineVertexInputDivisorStateCreateInfoEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PipelineVertexInputDivisorStateCreateInfoEXT( PipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineVertexInputDivisorStateCreateInfoEXT( VkPipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- PipelineVertexInputDivisorStateCreateInfoEXT& operator=( VkPipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ PipelineVertexInputDivisorStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT> const & vertexBindingDivisors_ )
+ : vertexBindingDivisorCount( static_cast<uint32_t>( vertexBindingDivisors_.size() ) ), pVertexBindingDivisors( vertexBindingDivisors_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PipelineVertexInputDivisorStateCreateInfoEXT & operator=( VkPipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+ PipelineVertexInputDivisorStateCreateInfoEXT & operator=( PipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineVertexInputDivisorStateCreateInfoEXT ) );
return *this;
}
@@ -53556,6 +73831,16 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ PipelineVertexInputDivisorStateCreateInfoEXT & setVertexBindingDivisors( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT> const & vertexBindingDivisors_ ) VULKAN_HPP_NOEXCEPT
+ {
+ vertexBindingDivisorCount = static_cast<uint32_t>( vertexBindingDivisors_.size() );
+ pVertexBindingDivisors = vertexBindingDivisors_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
operator VkPipelineVertexInputDivisorStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineVertexInputDivisorStateCreateInfoEXT*>( this );
@@ -53566,6 +73851,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPipelineVertexInputDivisorStateCreateInfoEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PipelineVertexInputDivisorStateCreateInfoEXT const& ) const = default;
+#else
bool operator==( PipelineVertexInputDivisorStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -53578,40 +73867,59 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT;
const void* pNext = {};
uint32_t vertexBindingDivisorCount = {};
const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors = {};
+
};
static_assert( sizeof( PipelineVertexInputDivisorStateCreateInfoEXT ) == sizeof( VkPipelineVertexInputDivisorStateCreateInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineVertexInputDivisorStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT>
+ {
+ using Type = PipelineVertexInputDivisorStateCreateInfoEXT;
+ };
+
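The ArrayProxyNoTemporaries constructor and setVertexBindingDivisors setter added above fill the count/pointer pair from a container in one step; a minimal usage sketch, assuming an enhanced-mode build (VULKAN_HPP_DISABLE_ENHANCED_MODE not defined) and the default vk namespace:

  std::vector<vk::VertexInputBindingDivisorDescriptionEXT> divisors = { { 0 /*binding*/, 2 /*divisor*/ } };
  vk::PipelineVertexInputDivisorStateCreateInfoEXT divisorState( divisors );   // count = divisors.size(), pointer = divisors.data()
  divisorState.setVertexBindingDivisors( divisors );                           // equivalent, via the new proxy setter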
struct PipelineViewportCoarseSampleOrderStateCreateInfoNV
{
- VULKAN_HPP_CONSTEXPR PipelineViewportCoarseSampleOrderStateCreateInfoNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_ = VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV::eDefault,
- uint32_t customSampleOrderCount_ = {},
- const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders_ = {} ) VULKAN_HPP_NOEXCEPT
- : sampleOrderType( sampleOrderType_ )
- , customSampleOrderCount( customSampleOrderCount_ )
- , pCustomSampleOrders( pCustomSampleOrders_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PipelineViewportCoarseSampleOrderStateCreateInfoNV(VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_ = VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV::eDefault, uint32_t customSampleOrderCount_ = {}, const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders_ = {}) VULKAN_HPP_NOEXCEPT
+ : sampleOrderType( sampleOrderType_ ), customSampleOrderCount( customSampleOrderCount_ ), pCustomSampleOrders( pCustomSampleOrders_ )
{}
- VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV ) - offsetof( PipelineViewportCoarseSampleOrderStateCreateInfoNV, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PipelineViewportCoarseSampleOrderStateCreateInfoNV( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineViewportCoarseSampleOrderStateCreateInfoNV( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- PipelineViewportCoarseSampleOrderStateCreateInfoNV& operator=( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ PipelineViewportCoarseSampleOrderStateCreateInfoNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders_ )
+ : sampleOrderType( sampleOrderType_ ), customSampleOrderCount( static_cast<uint32_t>( customSampleOrders_.size() ) ), pCustomSampleOrders( customSampleOrders_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PipelineViewportCoarseSampleOrderStateCreateInfoNV & operator=( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV const *>( &rhs );
+ return *this;
+ }
+
+ PipelineViewportCoarseSampleOrderStateCreateInfoNV & operator=( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineViewportCoarseSampleOrderStateCreateInfoNV ) );
return *this;
}
@@ -53639,6 +73947,16 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ PipelineViewportCoarseSampleOrderStateCreateInfoNV & setCustomSampleOrders( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders_ ) VULKAN_HPP_NOEXCEPT
+ {
+ customSampleOrderCount = static_cast<uint32_t>( customSampleOrders_.size() );
+ pCustomSampleOrders = customSampleOrders_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineViewportCoarseSampleOrderStateCreateInfoNV*>( this );
@@ -53649,6 +73967,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPipelineViewportCoarseSampleOrderStateCreateInfoNV*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PipelineViewportCoarseSampleOrderStateCreateInfoNV const& ) const = default;
+#else
bool operator==( PipelineViewportCoarseSampleOrderStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -53662,6 +73984,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV;
@@ -53669,32 +73994,50 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType = VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV::eDefault;
uint32_t customSampleOrderCount = {};
const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders = {};
+
};
static_assert( sizeof( PipelineViewportCoarseSampleOrderStateCreateInfoNV ) == sizeof( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineViewportCoarseSampleOrderStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV>
+ {
+ using Type = PipelineViewportCoarseSampleOrderStateCreateInfoNV;
+ };
+
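With the changes above, the coarse-sample-order state keeps its pointer/count constructor and gains a proxy-based one; a minimal sketch that selects a built-in order and leaves the custom-order array empty, assuming the default vk namespace:

  vk::PipelineViewportCoarseSampleOrderStateCreateInfoNV coarseOrder( vk::CoarseSampleOrderTypeNV::ePixelMajor );
  // custom orders could instead be supplied through setCustomSampleOrders() from a std::vector<vk::CoarseSampleOrderCustomNV>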
struct PipelineViewportExclusiveScissorStateCreateInfoNV
{
- VULKAN_HPP_CONSTEXPR PipelineViewportExclusiveScissorStateCreateInfoNV( uint32_t exclusiveScissorCount_ = {},
- const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors_ = {} ) VULKAN_HPP_NOEXCEPT
- : exclusiveScissorCount( exclusiveScissorCount_ )
- , pExclusiveScissors( pExclusiveScissors_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PipelineViewportExclusiveScissorStateCreateInfoNV(uint32_t exclusiveScissorCount_ = {}, const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors_ = {}) VULKAN_HPP_NOEXCEPT
+ : exclusiveScissorCount( exclusiveScissorCount_ ), pExclusiveScissors( pExclusiveScissors_ )
{}
- VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV ) - offsetof( PipelineViewportExclusiveScissorStateCreateInfoNV, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PipelineViewportExclusiveScissorStateCreateInfoNV( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineViewportExclusiveScissorStateCreateInfoNV( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- PipelineViewportExclusiveScissorStateCreateInfoNV& operator=( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ PipelineViewportExclusiveScissorStateCreateInfoNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors_ )
+ : exclusiveScissorCount( static_cast<uint32_t>( exclusiveScissors_.size() ) ), pExclusiveScissors( exclusiveScissors_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PipelineViewportExclusiveScissorStateCreateInfoNV & operator=( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV const *>( &rhs );
+ return *this;
+ }
+
+ PipelineViewportExclusiveScissorStateCreateInfoNV & operator=( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineViewportExclusiveScissorStateCreateInfoNV ) );
return *this;
}
@@ -53716,6 +74059,16 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ PipelineViewportExclusiveScissorStateCreateInfoNV & setExclusiveScissors( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors_ ) VULKAN_HPP_NOEXCEPT
+ {
+ exclusiveScissorCount = static_cast<uint32_t>( exclusiveScissors_.size() );
+ pExclusiveScissors = exclusiveScissors_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
operator VkPipelineViewportExclusiveScissorStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineViewportExclusiveScissorStateCreateInfoNV*>( this );
@@ -53726,6 +74079,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPipelineViewportExclusiveScissorStateCreateInfoNV*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PipelineViewportExclusiveScissorStateCreateInfoNV const& ) const = default;
+#else
bool operator==( PipelineViewportExclusiveScissorStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -53738,99 +74095,59 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV;
const void* pNext = {};
uint32_t exclusiveScissorCount = {};
const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors = {};
+
};
static_assert( sizeof( PipelineViewportExclusiveScissorStateCreateInfoNV ) == sizeof( VkPipelineViewportExclusiveScissorStateCreateInfoNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineViewportExclusiveScissorStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
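A corresponding sketch for the exclusive-scissor state, using the proxy constructor added above (assumes enhanced mode and the default vk namespace):

  std::vector<vk::Rect2D> exclusiveScissors = { vk::Rect2D{ vk::Offset2D{ 0, 0 }, vk::Extent2D{ 640, 480 } } };
  vk::PipelineViewportExclusiveScissorStateCreateInfoNV exclusiveScissorState( exclusiveScissors );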
- struct ShadingRatePaletteNV
+ template <>
+ struct CppType<StructureType, StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV>
{
- VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV( uint32_t shadingRatePaletteEntryCount_ = {},
- const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV* pShadingRatePaletteEntries_ = {} ) VULKAN_HPP_NOEXCEPT
- : shadingRatePaletteEntryCount( shadingRatePaletteEntryCount_ )
- , pShadingRatePaletteEntries( pShadingRatePaletteEntries_ )
- {}
-
- ShadingRatePaletteNV( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
-
- ShadingRatePaletteNV& operator=( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV const *>(&rhs);
- return *this;
- }
-
- ShadingRatePaletteNV & setShadingRatePaletteEntryCount( uint32_t shadingRatePaletteEntryCount_ ) VULKAN_HPP_NOEXCEPT
- {
- shadingRatePaletteEntryCount = shadingRatePaletteEntryCount_;
- return *this;
- }
-
- ShadingRatePaletteNV & setPShadingRatePaletteEntries( const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV* pShadingRatePaletteEntries_ ) VULKAN_HPP_NOEXCEPT
- {
- pShadingRatePaletteEntries = pShadingRatePaletteEntries_;
- return *this;
- }
+ using Type = PipelineViewportExclusiveScissorStateCreateInfoNV;
+ };
- operator VkShadingRatePaletteNV const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkShadingRatePaletteNV*>( this );
- }
+ struct PipelineViewportShadingRateImageStateCreateInfoNV
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV;
- operator VkShadingRatePaletteNV &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkShadingRatePaletteNV*>( this );
- }
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PipelineViewportShadingRateImageStateCreateInfoNV(VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_ = {}, uint32_t viewportCount_ = {}, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes_ = {}) VULKAN_HPP_NOEXCEPT
+ : shadingRateImageEnable( shadingRateImageEnable_ ), viewportCount( viewportCount_ ), pShadingRatePalettes( pShadingRatePalettes_ )
+ {}
- bool operator==( ShadingRatePaletteNV const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( shadingRatePaletteEntryCount == rhs.shadingRatePaletteEntryCount )
- && ( pShadingRatePaletteEntries == rhs.pShadingRatePaletteEntries );
- }
+ VULKAN_HPP_CONSTEXPR PipelineViewportShadingRateImageStateCreateInfoNV( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- bool operator!=( ShadingRatePaletteNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ PipelineViewportShadingRateImageStateCreateInfoNV( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- return !operator==( rhs );
+ *this = rhs;
}
- public:
- uint32_t shadingRatePaletteEntryCount = {};
- const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV* pShadingRatePaletteEntries = {};
- };
- static_assert( sizeof( ShadingRatePaletteNV ) == sizeof( VkShadingRatePaletteNV ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ShadingRatePaletteNV>::value, "struct wrapper is not a standard layout!" );
-
- struct PipelineViewportShadingRateImageStateCreateInfoNV
- {
- VULKAN_HPP_CONSTEXPR PipelineViewportShadingRateImageStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_ = {},
- uint32_t viewportCount_ = {},
- const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes_ = {} ) VULKAN_HPP_NOEXCEPT
- : shadingRateImageEnable( shadingRateImageEnable_ )
- , viewportCount( viewportCount_ )
- , pShadingRatePalettes( pShadingRatePalettes_ )
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ PipelineViewportShadingRateImageStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes_ )
+ : shadingRateImageEnable( shadingRateImageEnable_ ), viewportCount( static_cast<uint32_t>( shadingRatePalettes_.size() ) ), pShadingRatePalettes( shadingRatePalettes_.data() )
{}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineViewportShadingRateImageStateCreateInfoNV & operator=( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV ) - offsetof( PipelineViewportShadingRateImageStateCreateInfoNV, pNext ) );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV const *>( &rhs );
return *this;
}
- PipelineViewportShadingRateImageStateCreateInfoNV( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineViewportShadingRateImageStateCreateInfoNV & operator=( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
- }
-
- PipelineViewportShadingRateImageStateCreateInfoNV& operator=( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineViewportShadingRateImageStateCreateInfoNV ) );
return *this;
}
@@ -53858,6 +74175,16 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ PipelineViewportShadingRateImageStateCreateInfoNV & setShadingRatePalettes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes_ ) VULKAN_HPP_NOEXCEPT
+ {
+ viewportCount = static_cast<uint32_t>( shadingRatePalettes_.size() );
+ pShadingRatePalettes = shadingRatePalettes_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
operator VkPipelineViewportShadingRateImageStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineViewportShadingRateImageStateCreateInfoNV*>( this );
@@ -53868,6 +74195,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPipelineViewportShadingRateImageStateCreateInfoNV*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PipelineViewportShadingRateImageStateCreateInfoNV const& ) const = default;
+#else
bool operator==( PipelineViewportShadingRateImageStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -53881,6 +74212,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV;
@@ -53888,30 +74222,43 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable = {};
uint32_t viewportCount = {};
const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes = {};
+
};
static_assert( sizeof( PipelineViewportShadingRateImageStateCreateInfoNV ) == sizeof( VkPipelineViewportShadingRateImageStateCreateInfoNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineViewportShadingRateImageStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV>
+ {
+ using Type = PipelineViewportShadingRateImageStateCreateInfoNV;
+ };
+
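The shading-rate-image viewport state gets the same treatment; a minimal sketch pairing one palette per viewport, assuming enhanced mode and the default vk namespace:

  std::vector<vk::ShadingRatePaletteNV> palettes( 1 );                                            // one (empty) palette for one viewport
  vk::PipelineViewportShadingRateImageStateCreateInfoNV shadingRateState( VK_TRUE, palettes );    // viewportCount = palettes.size()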
struct ViewportSwizzleNV
{
- VULKAN_HPP_CONSTEXPR ViewportSwizzleNV( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX,
- VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX,
- VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX,
- VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX ) VULKAN_HPP_NOEXCEPT
- : x( x_ )
- , y( y_ )
- , z( z_ )
- , w( w_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ViewportSwizzleNV(VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX, VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX, VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX, VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX) VULKAN_HPP_NOEXCEPT
+ : x( x_ ), y( y_ ), z( z_ ), w( w_ )
{}
+ VULKAN_HPP_CONSTEXPR ViewportSwizzleNV( ViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
ViewportSwizzleNV( VkViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ ViewportSwizzleNV & operator=( VkViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ViewportSwizzleNV const *>( &rhs );
+ return *this;
+ }
- ViewportSwizzleNV& operator=( VkViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ ViewportSwizzleNV & operator=( ViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ViewportSwizzleNV const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ViewportSwizzleNV ) );
return *this;
}
@@ -53939,6 +74286,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkViewportSwizzleNV const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkViewportSwizzleNV*>( this );
@@ -53949,6 +74297,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkViewportSwizzleNV*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ViewportSwizzleNV const& ) const = default;
+#else
bool operator==( ViewportSwizzleNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( x == rhs.x )
@@ -53961,40 +74313,53 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX;
VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX;
VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX;
VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX;
+
};
static_assert( sizeof( ViewportSwizzleNV ) == sizeof( VkViewportSwizzleNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ViewportSwizzleNV>::value, "struct wrapper is not a standard layout!" );
struct PipelineViewportSwizzleStateCreateInfoNV
{
- VULKAN_HPP_CONSTEXPR PipelineViewportSwizzleStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_ = {},
- uint32_t viewportCount_ = {},
- const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV* pViewportSwizzles_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , viewportCount( viewportCount_ )
- , pViewportSwizzles( pViewportSwizzles_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportSwizzleStateCreateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PipelineViewportSwizzleStateCreateInfoNV(VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_ = {}, uint32_t viewportCount_ = {}, const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV* pViewportSwizzles_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), viewportCount( viewportCount_ ), pViewportSwizzles( pViewportSwizzles_ )
{}
- VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV ) - offsetof( PipelineViewportSwizzleStateCreateInfoNV, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PipelineViewportSwizzleStateCreateInfoNV( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineViewportSwizzleStateCreateInfoNV( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- PipelineViewportSwizzleStateCreateInfoNV& operator=( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ PipelineViewportSwizzleStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV> const & viewportSwizzles_ )
+ : flags( flags_ ), viewportCount( static_cast<uint32_t>( viewportSwizzles_.size() ) ), pViewportSwizzles( viewportSwizzles_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PipelineViewportSwizzleStateCreateInfoNV & operator=( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV const *>( &rhs );
+ return *this;
+ }
+
+ PipelineViewportSwizzleStateCreateInfoNV & operator=( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineViewportSwizzleStateCreateInfoNV ) );
return *this;
}
@@ -54022,6 +74387,16 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ PipelineViewportSwizzleStateCreateInfoNV & setViewportSwizzles( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV> const & viewportSwizzles_ ) VULKAN_HPP_NOEXCEPT
+ {
+ viewportCount = static_cast<uint32_t>( viewportSwizzles_.size() );
+ pViewportSwizzles = viewportSwizzles_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
operator VkPipelineViewportSwizzleStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineViewportSwizzleStateCreateInfoNV*>( this );
@@ -54032,6 +74407,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPipelineViewportSwizzleStateCreateInfoNV*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PipelineViewportSwizzleStateCreateInfoNV const& ) const = default;
+#else
bool operator==( PipelineViewportSwizzleStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -54045,6 +74424,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportSwizzleStateCreateInfoNV;
@@ -54052,93 +74434,50 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags = {};
uint32_t viewportCount = {};
const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV* pViewportSwizzles = {};
+
};
static_assert( sizeof( PipelineViewportSwizzleStateCreateInfoNV ) == sizeof( VkPipelineViewportSwizzleStateCreateInfoNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineViewportSwizzleStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
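And likewise for the viewport swizzle state; a minimal sketch with default flags, assuming enhanced mode and the default vk namespace:

  std::vector<vk::ViewportSwizzleNV> swizzles( 1 );                                // each swizzle defaults to ePositiveX for x/y/z/w
  vk::PipelineViewportSwizzleStateCreateInfoNV swizzleState( {}, swizzles );       // empty flags, viewportCount = swizzles.size()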
- struct ViewportWScalingNV
+ template <>
+ struct CppType<StructureType, StructureType::ePipelineViewportSwizzleStateCreateInfoNV>
{
- VULKAN_HPP_CONSTEXPR ViewportWScalingNV( float xcoeff_ = {},
- float ycoeff_ = {} ) VULKAN_HPP_NOEXCEPT
- : xcoeff( xcoeff_ )
- , ycoeff( ycoeff_ )
- {}
-
- ViewportWScalingNV( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
-
- ViewportWScalingNV& operator=( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ViewportWScalingNV const *>(&rhs);
- return *this;
- }
-
- ViewportWScalingNV & setXcoeff( float xcoeff_ ) VULKAN_HPP_NOEXCEPT
- {
- xcoeff = xcoeff_;
- return *this;
- }
-
- ViewportWScalingNV & setYcoeff( float ycoeff_ ) VULKAN_HPP_NOEXCEPT
- {
- ycoeff = ycoeff_;
- return *this;
- }
+ using Type = PipelineViewportSwizzleStateCreateInfoNV;
+ };
- operator VkViewportWScalingNV const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkViewportWScalingNV*>( this );
- }
+ struct PipelineViewportWScalingStateCreateInfoNV
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportWScalingStateCreateInfoNV;
- operator VkViewportWScalingNV &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkViewportWScalingNV*>( this );
- }
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PipelineViewportWScalingStateCreateInfoNV(VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_ = {}, uint32_t viewportCount_ = {}, const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings_ = {}) VULKAN_HPP_NOEXCEPT
+ : viewportWScalingEnable( viewportWScalingEnable_ ), viewportCount( viewportCount_ ), pViewportWScalings( pViewportWScalings_ )
+ {}
- bool operator==( ViewportWScalingNV const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( xcoeff == rhs.xcoeff )
- && ( ycoeff == rhs.ycoeff );
- }
+ VULKAN_HPP_CONSTEXPR PipelineViewportWScalingStateCreateInfoNV( PipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- bool operator!=( ViewportWScalingNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ PipelineViewportWScalingStateCreateInfoNV( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- return !operator==( rhs );
+ *this = rhs;
}
- public:
- float xcoeff = {};
- float ycoeff = {};
- };
- static_assert( sizeof( ViewportWScalingNV ) == sizeof( VkViewportWScalingNV ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ViewportWScalingNV>::value, "struct wrapper is not a standard layout!" );
-
- struct PipelineViewportWScalingStateCreateInfoNV
- {
- VULKAN_HPP_CONSTEXPR PipelineViewportWScalingStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_ = {},
- uint32_t viewportCount_ = {},
- const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings_ = {} ) VULKAN_HPP_NOEXCEPT
- : viewportWScalingEnable( viewportWScalingEnable_ )
- , viewportCount( viewportCount_ )
- , pViewportWScalings( pViewportWScalings_ )
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ PipelineViewportWScalingStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings_ )
+ : viewportWScalingEnable( viewportWScalingEnable_ ), viewportCount( static_cast<uint32_t>( viewportWScalings_.size() ) ), pViewportWScalings( viewportWScalings_.data() )
{}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineViewportWScalingStateCreateInfoNV & operator=( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV ) - offsetof( PipelineViewportWScalingStateCreateInfoNV, pNext ) );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV const *>( &rhs );
return *this;
}
- PipelineViewportWScalingStateCreateInfoNV( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
-
- PipelineViewportWScalingStateCreateInfoNV& operator=( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineViewportWScalingStateCreateInfoNV & operator=( PipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineViewportWScalingStateCreateInfoNV ) );
return *this;
}
@@ -54166,6 +74505,16 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ PipelineViewportWScalingStateCreateInfoNV & setViewportWScalings( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings_ ) VULKAN_HPP_NOEXCEPT
+ {
+ viewportCount = static_cast<uint32_t>( viewportWScalings_.size() );
+ pViewportWScalings = viewportWScalings_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
operator VkPipelineViewportWScalingStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineViewportWScalingStateCreateInfoNV*>( this );
@@ -54176,6 +74525,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPipelineViewportWScalingStateCreateInfoNV*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PipelineViewportWScalingStateCreateInfoNV const& ) const = default;
+#else
bool operator==( PipelineViewportWScalingStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -54189,6 +74542,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportWScalingStateCreateInfoNV;
@@ -54196,32 +74552,45 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable = {};
uint32_t viewportCount = {};
const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings = {};
+
};
static_assert( sizeof( PipelineViewportWScalingStateCreateInfoNV ) == sizeof( VkPipelineViewportWScalingStateCreateInfoNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineViewportWScalingStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
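Same pattern for viewport W-scaling; a minimal sketch, assuming enhanced mode and the default vk namespace:

  std::vector<vk::ViewportWScalingNV> wScalings = { vk::ViewportWScalingNV{ 1.0f, 1.0f } };
  vk::PipelineViewportWScalingStateCreateInfoNV wScalingState( VK_TRUE, wScalings );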
-#ifdef VK_USE_PLATFORM_GGP
+ template <>
+ struct CppType<StructureType, StructureType::ePipelineViewportWScalingStateCreateInfoNV>
+ {
+ using Type = PipelineViewportWScalingStateCreateInfoNV;
+ };
+#ifdef VK_USE_PLATFORM_GGP
struct PresentFrameTokenGGP
{
- VULKAN_HPP_CONSTEXPR PresentFrameTokenGGP( GgpFrameToken frameToken_ = {} ) VULKAN_HPP_NOEXCEPT
- : frameToken( frameToken_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentFrameTokenGGP;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PresentFrameTokenGGP(GgpFrameToken frameToken_ = {}) VULKAN_HPP_NOEXCEPT
+ : frameToken( frameToken_ )
{}
- VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP & operator=( VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP ) - offsetof( PresentFrameTokenGGP, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PresentFrameTokenGGP( PresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PresentFrameTokenGGP( VkPresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- PresentFrameTokenGGP& operator=( VkPresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT
+ PresentFrameTokenGGP & operator=( VkPresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP const *>( &rhs );
+ return *this;
+ }
+
+ PresentFrameTokenGGP & operator=( PresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PresentFrameTokenGGP ) );
return *this;
}
@@ -54237,6 +74606,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPresentFrameTokenGGP const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPresentFrameTokenGGP*>( this );
@@ -54247,177 +74617,83 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPresentFrameTokenGGP*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PresentFrameTokenGGP const& ) const = default;
+#else
bool operator==( PresentFrameTokenGGP const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( frameToken == rhs.frameToken );
+ && ( memcmp( &frameToken, &rhs.frameToken, sizeof( GgpFrameToken ) ) == 0 );
}
bool operator!=( PresentFrameTokenGGP const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentFrameTokenGGP;
const void* pNext = {};
GgpFrameToken frameToken = {};
+
};
static_assert( sizeof( PresentFrameTokenGGP ) == sizeof( VkPresentFrameTokenGGP ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PresentFrameTokenGGP>::value, "struct wrapper is not a standard layout!" );
-#endif /*VK_USE_PLATFORM_GGP*/
- struct PresentInfoKHR
+ template <>
+ struct CppType<StructureType, StructureType::ePresentFrameTokenGGP>
{
- VULKAN_HPP_CONSTEXPR PresentInfoKHR( uint32_t waitSemaphoreCount_ = {},
- const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ = {},
- uint32_t swapchainCount_ = {},
- const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains_ = {},
- const uint32_t* pImageIndices_ = {},
- VULKAN_HPP_NAMESPACE::Result* pResults_ = {} ) VULKAN_HPP_NOEXCEPT
- : waitSemaphoreCount( waitSemaphoreCount_ )
- , pWaitSemaphores( pWaitSemaphores_ )
- , swapchainCount( swapchainCount_ )
- , pSwapchains( pSwapchains_ )
- , pImageIndices( pImageIndices_ )
- , pResults( pResults_ )
- {}
-
- VULKAN_HPP_NAMESPACE::PresentInfoKHR & operator=( VULKAN_HPP_NAMESPACE::PresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PresentInfoKHR ) - offsetof( PresentInfoKHR, pNext ) );
- return *this;
- }
-
- PresentInfoKHR( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
-
- PresentInfoKHR& operator=( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentInfoKHR const *>(&rhs);
- return *this;
- }
-
- PresentInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
- PresentInfoKHR & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
- {
- waitSemaphoreCount = waitSemaphoreCount_;
- return *this;
- }
-
- PresentInfoKHR & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT
- {
- pWaitSemaphores = pWaitSemaphores_;
- return *this;
- }
-
- PresentInfoKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
- {
- swapchainCount = swapchainCount_;
- return *this;
- }
-
- PresentInfoKHR & setPSwapchains( const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains_ ) VULKAN_HPP_NOEXCEPT
- {
- pSwapchains = pSwapchains_;
- return *this;
- }
-
- PresentInfoKHR & setPImageIndices( const uint32_t* pImageIndices_ ) VULKAN_HPP_NOEXCEPT
- {
- pImageIndices = pImageIndices_;
- return *this;
- }
+ using Type = PresentFrameTokenGGP;
+ };
+#endif /*VK_USE_PLATFORM_GGP*/
- PresentInfoKHR & setPResults( VULKAN_HPP_NAMESPACE::Result* pResults_ ) VULKAN_HPP_NOEXCEPT
- {
- pResults = pResults_;
- return *this;
- }
+ struct RectLayerKHR
+ {
- operator VkPresentInfoKHR const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkPresentInfoKHR*>( this );
- }
- operator VkPresentInfoKHR &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkPresentInfoKHR*>( this );
- }
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR RectLayerKHR(VULKAN_HPP_NAMESPACE::Offset2D offset_ = {}, VULKAN_HPP_NAMESPACE::Extent2D extent_ = {}, uint32_t layer_ = {}) VULKAN_HPP_NOEXCEPT
+ : offset( offset_ ), extent( extent_ ), layer( layer_ )
+ {}
- bool operator==( PresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
- && ( pWaitSemaphores == rhs.pWaitSemaphores )
- && ( swapchainCount == rhs.swapchainCount )
- && ( pSwapchains == rhs.pSwapchains )
- && ( pImageIndices == rhs.pImageIndices )
- && ( pResults == rhs.pResults );
- }
+ VULKAN_HPP_CONSTEXPR RectLayerKHR( RectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- bool operator!=( PresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ RectLayerKHR( VkRectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- return !operator==( rhs );
+ *this = rhs;
}
- public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentInfoKHR;
- const void* pNext = {};
- uint32_t waitSemaphoreCount = {};
- const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores = {};
- uint32_t swapchainCount = {};
- const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains = {};
- const uint32_t* pImageIndices = {};
- VULKAN_HPP_NAMESPACE::Result* pResults = {};
- };
- static_assert( sizeof( PresentInfoKHR ) == sizeof( VkPresentInfoKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PresentInfoKHR>::value, "struct wrapper is not a standard layout!" );
-
- struct RectLayerKHR
- {
- VULKAN_HPP_CONSTEXPR RectLayerKHR( VULKAN_HPP_NAMESPACE::Offset2D offset_ = {},
- VULKAN_HPP_NAMESPACE::Extent2D extent_ = {},
- uint32_t layer_ = {} ) VULKAN_HPP_NOEXCEPT
- : offset( offset_ )
- , extent( extent_ )
- , layer( layer_ )
- {}
-
- explicit RectLayerKHR( Rect2D const& rect2D,
- uint32_t layer_ = {} )
+ explicit RectLayerKHR( Rect2D const& rect2D, uint32_t layer_ = {} )
: offset( rect2D.offset )
, extent( rect2D.extent )
, layer( layer_ )
{}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- RectLayerKHR( VkRectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ RectLayerKHR & operator=( VkRectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RectLayerKHR const *>( &rhs );
+ return *this;
}
- RectLayerKHR& operator=( VkRectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ RectLayerKHR & operator=( RectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RectLayerKHR const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( RectLayerKHR ) );
return *this;
}
- RectLayerKHR & setOffset( VULKAN_HPP_NAMESPACE::Offset2D offset_ ) VULKAN_HPP_NOEXCEPT
+ RectLayerKHR & setOffset( VULKAN_HPP_NAMESPACE::Offset2D const & offset_ ) VULKAN_HPP_NOEXCEPT
{
offset = offset_;
return *this;
}
- RectLayerKHR & setExtent( VULKAN_HPP_NAMESPACE::Extent2D extent_ ) VULKAN_HPP_NOEXCEPT
+ RectLayerKHR & setExtent( VULKAN_HPP_NAMESPACE::Extent2D const & extent_ ) VULKAN_HPP_NOEXCEPT
{
extent = extent_;
return *this;
@@ -54429,6 +74705,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkRectLayerKHR const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRectLayerKHR*>( this );
@@ -54439,6 +74716,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkRectLayerKHR*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( RectLayerKHR const& ) const = default;
+#else
bool operator==( RectLayerKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( offset == rhs.offset )
@@ -54450,31 +74731,51 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
VULKAN_HPP_NAMESPACE::Offset2D offset = {};
VULKAN_HPP_NAMESPACE::Extent2D extent = {};
uint32_t layer = {};
+
};
static_assert( sizeof( RectLayerKHR ) == sizeof( VkRectLayerKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<RectLayerKHR>::value, "struct wrapper is not a standard layout!" );
struct PresentRegionKHR
{
- VULKAN_HPP_CONSTEXPR PresentRegionKHR( uint32_t rectangleCount_ = {},
- const VULKAN_HPP_NAMESPACE::RectLayerKHR* pRectangles_ = {} ) VULKAN_HPP_NOEXCEPT
- : rectangleCount( rectangleCount_ )
- , pRectangles( pRectangles_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PresentRegionKHR(uint32_t rectangleCount_ = {}, const VULKAN_HPP_NAMESPACE::RectLayerKHR* pRectangles_ = {}) VULKAN_HPP_NOEXCEPT
+ : rectangleCount( rectangleCount_ ), pRectangles( pRectangles_ )
{}
+ VULKAN_HPP_CONSTEXPR PresentRegionKHR( PresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
PresentRegionKHR( VkPresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- PresentRegionKHR& operator=( VkPresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ PresentRegionKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RectLayerKHR> const & rectangles_ )
+ : rectangleCount( static_cast<uint32_t>( rectangles_.size() ) ), pRectangles( rectangles_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PresentRegionKHR & operator=( VkPresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentRegionKHR const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentRegionKHR const *>( &rhs );
+ return *this;
+ }
+
+ PresentRegionKHR & operator=( PresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PresentRegionKHR ) );
return *this;
}
@@ -54490,6 +74791,16 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ PresentRegionKHR & setRectangles( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RectLayerKHR> const & rectangles_ ) VULKAN_HPP_NOEXCEPT
+ {
+ rectangleCount = static_cast<uint32_t>( rectangles_.size() );
+ pRectangles = rectangles_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
operator VkPresentRegionKHR const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPresentRegionKHR*>( this );
@@ -54500,6 +74811,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPresentRegionKHR*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PresentRegionKHR const& ) const = default;
+#else
bool operator==( PresentRegionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( rectangleCount == rhs.rectangleCount )
@@ -54510,36 +74825,51 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
uint32_t rectangleCount = {};
const VULKAN_HPP_NAMESPACE::RectLayerKHR* pRectangles = {};
+
};
static_assert( sizeof( PresentRegionKHR ) == sizeof( VkPresentRegionKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PresentRegionKHR>::value, "struct wrapper is not a standard layout!" );
struct PresentRegionsKHR
{
- VULKAN_HPP_CONSTEXPR PresentRegionsKHR( uint32_t swapchainCount_ = {},
- const VULKAN_HPP_NAMESPACE::PresentRegionKHR* pRegions_ = {} ) VULKAN_HPP_NOEXCEPT
- : swapchainCount( swapchainCount_ )
- , pRegions( pRegions_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentRegionsKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PresentRegionsKHR(uint32_t swapchainCount_ = {}, const VULKAN_HPP_NAMESPACE::PresentRegionKHR* pRegions_ = {}) VULKAN_HPP_NOEXCEPT
+ : swapchainCount( swapchainCount_ ), pRegions( pRegions_ )
{}
- VULKAN_HPP_NAMESPACE::PresentRegionsKHR & operator=( VULKAN_HPP_NAMESPACE::PresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PresentRegionsKHR ) - offsetof( PresentRegionsKHR, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PresentRegionsKHR( PresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PresentRegionsKHR( VkPresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- PresentRegionsKHR& operator=( VkPresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ PresentRegionsKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentRegionKHR> const & regions_ )
+ : swapchainCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PresentRegionsKHR & operator=( VkPresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentRegionsKHR const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentRegionsKHR const *>( &rhs );
+ return *this;
+ }
+
+ PresentRegionsKHR & operator=( PresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PresentRegionsKHR ) );
return *this;
}
@@ -54561,6 +74891,16 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ PresentRegionsKHR & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentRegionKHR> const & regions_ ) VULKAN_HPP_NOEXCEPT
+ {
+ swapchainCount = static_cast<uint32_t>( regions_.size() );
+ pRegions = regions_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
operator VkPresentRegionsKHR const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPresentRegionsKHR*>( this );
@@ -54571,6 +74911,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPresentRegionsKHR*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PresentRegionsKHR const& ) const = default;
+#else
bool operator==( PresentRegionsKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -54583,32 +74927,52 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentRegionsKHR;
const void* pNext = {};
uint32_t swapchainCount = {};
const VULKAN_HPP_NAMESPACE::PresentRegionKHR* pRegions = {};
+
};
static_assert( sizeof( PresentRegionsKHR ) == sizeof( VkPresentRegionsKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PresentRegionsKHR>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePresentRegionsKHR>
+ {
+ using Type = PresentRegionsKHR;
+ };
+
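Taken together, the RectLayerKHR, PresentRegionKHR and PresentRegionsKHR changes above make it straightforward to describe dirty rectangles for VK_KHR_incremental_present from containers; a minimal sketch, assuming enhanced mode and the default vk namespace:

  std::vector<vk::RectLayerKHR> dirtyRects = { vk::RectLayerKHR{ vk::Offset2D{ 0, 0 }, vk::Extent2D{ 64, 64 }, 0 /*layer*/ } };
  std::vector<vk::PresentRegionKHR> regions = { vk::PresentRegionKHR{ dirtyRects } };
  vk::PresentRegionsKHR presentRegions( regions );   // one entry per swapchain, chained into the present info's pNext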
struct PresentTimeGOOGLE
{
- VULKAN_HPP_CONSTEXPR PresentTimeGOOGLE( uint32_t presentID_ = {},
- uint64_t desiredPresentTime_ = {} ) VULKAN_HPP_NOEXCEPT
- : presentID( presentID_ )
- , desiredPresentTime( desiredPresentTime_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PresentTimeGOOGLE(uint32_t presentID_ = {}, uint64_t desiredPresentTime_ = {}) VULKAN_HPP_NOEXCEPT
+ : presentID( presentID_ ), desiredPresentTime( desiredPresentTime_ )
{}
+ VULKAN_HPP_CONSTEXPR PresentTimeGOOGLE( PresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
PresentTimeGOOGLE( VkPresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PresentTimeGOOGLE & operator=( VkPresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE const *>( &rhs );
+ return *this;
+ }
- PresentTimeGOOGLE& operator=( VkPresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+ PresentTimeGOOGLE & operator=( PresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PresentTimeGOOGLE ) );
return *this;
}
@@ -54624,6 +74988,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkPresentTimeGOOGLE const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPresentTimeGOOGLE*>( this );
@@ -54634,6 +74999,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPresentTimeGOOGLE*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PresentTimeGOOGLE const& ) const = default;
+#else
bool operator==( PresentTimeGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( presentID == rhs.presentID )
@@ -54644,36 +75013,51 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
uint32_t presentID = {};
uint64_t desiredPresentTime = {};
+
};
static_assert( sizeof( PresentTimeGOOGLE ) == sizeof( VkPresentTimeGOOGLE ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PresentTimeGOOGLE>::value, "struct wrapper is not a standard layout!" );
struct PresentTimesInfoGOOGLE
{
- VULKAN_HPP_CONSTEXPR PresentTimesInfoGOOGLE( uint32_t swapchainCount_ = {},
- const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE* pTimes_ = {} ) VULKAN_HPP_NOEXCEPT
- : swapchainCount( swapchainCount_ )
- , pTimes( pTimes_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentTimesInfoGOOGLE;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PresentTimesInfoGOOGLE(uint32_t swapchainCount_ = {}, const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE* pTimes_ = {}) VULKAN_HPP_NOEXCEPT
+ : swapchainCount( swapchainCount_ ), pTimes( pTimes_ )
{}
- VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE & operator=( VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE ) - offsetof( PresentTimesInfoGOOGLE, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR PresentTimesInfoGOOGLE( PresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PresentTimesInfoGOOGLE( VkPresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- PresentTimesInfoGOOGLE& operator=( VkPresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ PresentTimesInfoGOOGLE( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE> const & times_ )
+ : swapchainCount( static_cast<uint32_t>( times_.size() ) ), pTimes( times_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ PresentTimesInfoGOOGLE & operator=( VkPresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE const *>( &rhs );
+ return *this;
+ }
+
+ PresentTimesInfoGOOGLE & operator=( PresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( PresentTimesInfoGOOGLE ) );
return *this;
}
@@ -54695,6 +75079,16 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ PresentTimesInfoGOOGLE & setTimes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE> const & times_ ) VULKAN_HPP_NOEXCEPT
+ {
+ swapchainCount = static_cast<uint32_t>( times_.size() );
+ pTimes = times_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
operator VkPresentTimesInfoGOOGLE const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPresentTimesInfoGOOGLE*>( this );
@@ -54705,6 +75099,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPresentTimesInfoGOOGLE*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PresentTimesInfoGOOGLE const& ) const = default;
+#else
bool operator==( PresentTimesInfoGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -54717,36 +75115,53 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentTimesInfoGOOGLE;
const void* pNext = {};
uint32_t swapchainCount = {};
const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE* pTimes = {};
+
};
static_assert( sizeof( PresentTimesInfoGOOGLE ) == sizeof( VkPresentTimesInfoGOOGLE ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PresentTimesInfoGOOGLE>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::ePresentTimesInfoGOOGLE>
+ {
+ using Type = PresentTimesInfoGOOGLE;
+ };
+
struct ProtectedSubmitInfo
{
- VULKAN_HPP_CONSTEXPR ProtectedSubmitInfo( VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit_ = {} ) VULKAN_HPP_NOEXCEPT
- : protectedSubmit( protectedSubmit_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eProtectedSubmitInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ProtectedSubmitInfo(VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit_ = {}) VULKAN_HPP_NOEXCEPT
+ : protectedSubmit( protectedSubmit_ )
{}
- VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo & operator=( VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo ) - offsetof( ProtectedSubmitInfo, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR ProtectedSubmitInfo( ProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ProtectedSubmitInfo( VkProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ ProtectedSubmitInfo & operator=( VkProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo const *>( &rhs );
+ return *this;
+ }
- ProtectedSubmitInfo& operator=( VkProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ ProtectedSubmitInfo & operator=( ProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ProtectedSubmitInfo ) );
return *this;
}
@@ -54762,6 +75177,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkProtectedSubmitInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkProtectedSubmitInfo*>( this );
@@ -54772,6 +75188,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkProtectedSubmitInfo*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ProtectedSubmitInfo const& ) const = default;
+#else
bool operator==( ProtectedSubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -54783,283 +75203,144 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eProtectedSubmitInfo;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit = {};
+
};
static_assert( sizeof( ProtectedSubmitInfo ) == sizeof( VkProtectedSubmitInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ProtectedSubmitInfo>::value, "struct wrapper is not a standard layout!" );
- struct QueryPoolCreateInfo
+ template <>
+ struct CppType<StructureType, StructureType::eProtectedSubmitInfo>
{
- VULKAN_HPP_CONSTEXPR QueryPoolCreateInfo( VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags_ = {},
- VULKAN_HPP_NAMESPACE::QueryType queryType_ = VULKAN_HPP_NAMESPACE::QueryType::eOcclusion,
- uint32_t queryCount_ = {},
- VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , queryType( queryType_ )
- , queryCount( queryCount_ )
- , pipelineStatistics( pipelineStatistics_ )
+ using Type = ProtectedSubmitInfo;
+ };
+
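Aside (illustration only, not part of the diff): each wrapper now carries a static structureType member, and the CppType specializations added above (here for ProtectedSubmitInfo, analogously for the other structures) let template code map a StructureType enumerant back to its wrapper type at compile time. A minimal sketch, assuming vulkan.hpp (and therefore <type_traits>) is included; the alias name ProtectedSubmit is hypothetical:

    using ProtectedSubmit =
      VULKAN_HPP_NAMESPACE::CppType<VULKAN_HPP_NAMESPACE::StructureType,
                                    VULKAN_HPP_NAMESPACE::StructureType::eProtectedSubmitInfo>::Type;
    // The trait recovers the wrapper from the enumerant ...
    static_assert( std::is_same<ProtectedSubmit, VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo>::value,
                   "CppType maps the enumerant back to the wrapper" );
    // ... and the new static member goes the other way.
    static_assert( VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo::structureType ==
                     VULKAN_HPP_NAMESPACE::StructureType::eProtectedSubmitInfo,
                   "structureType names the wrapper's sType value" );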
+ struct QueryPoolPerformanceQueryCreateInfoINTEL
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR QueryPoolPerformanceQueryCreateInfoINTEL(VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ = VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual) VULKAN_HPP_NOEXCEPT
+ : performanceCountersSampling( performanceCountersSampling_ )
{}
- VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & operator=( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo ) - offsetof( QueryPoolCreateInfo, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR QueryPoolPerformanceQueryCreateInfoINTEL( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- QueryPoolCreateInfo( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ QueryPoolPerformanceQueryCreateInfoINTEL( VkQueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- QueryPoolCreateInfo& operator=( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ QueryPoolPerformanceQueryCreateInfoINTEL & operator=( VkQueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceQueryCreateInfoINTEL const *>( &rhs );
return *this;
}
- QueryPoolCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ QueryPoolPerformanceQueryCreateInfoINTEL & operator=( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( QueryPoolPerformanceQueryCreateInfoINTEL ) );
return *this;
}
- QueryPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ QueryPoolPerformanceQueryCreateInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- flags = flags_;
+ pNext = pNext_;
return *this;
}
- QueryPoolCreateInfo & setQueryType( VULKAN_HPP_NAMESPACE::QueryType queryType_ ) VULKAN_HPP_NOEXCEPT
+ QueryPoolPerformanceQueryCreateInfoINTEL & setPerformanceCountersSampling( VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ ) VULKAN_HPP_NOEXCEPT
{
- queryType = queryType_;
+ performanceCountersSampling = performanceCountersSampling_;
return *this;
}
- QueryPoolCreateInfo & setQueryCount( uint32_t queryCount_ ) VULKAN_HPP_NOEXCEPT
- {
- queryCount = queryCount_;
- return *this;
- }
- QueryPoolCreateInfo & setPipelineStatistics( VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT
+ operator VkQueryPoolPerformanceQueryCreateInfoINTEL const&() const VULKAN_HPP_NOEXCEPT
{
- pipelineStatistics = pipelineStatistics_;
- return *this;
+ return *reinterpret_cast<const VkQueryPoolPerformanceQueryCreateInfoINTEL*>( this );
}
- operator VkQueryPoolCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+ operator VkQueryPoolPerformanceQueryCreateInfoINTEL &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkQueryPoolCreateInfo*>( this );
+ return *reinterpret_cast<VkQueryPoolPerformanceQueryCreateInfoINTEL*>( this );
}
- operator VkQueryPoolCreateInfo &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkQueryPoolCreateInfo*>( this );
- }
- bool operator==( QueryPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( QueryPoolPerformanceQueryCreateInfoINTEL const& ) const = default;
+#else
+ bool operator==( QueryPoolPerformanceQueryCreateInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( flags == rhs.flags )
- && ( queryType == rhs.queryType )
- && ( queryCount == rhs.queryCount )
- && ( pipelineStatistics == rhs.pipelineStatistics );
+ && ( performanceCountersSampling == rhs.performanceCountersSampling );
}
- bool operator!=( QueryPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( QueryPoolPerformanceQueryCreateInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
- public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolCreateInfo;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags = {};
- VULKAN_HPP_NAMESPACE::QueryType queryType = VULKAN_HPP_NAMESPACE::QueryType::eOcclusion;
- uint32_t queryCount = {};
- VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics = {};
- };
- static_assert( sizeof( QueryPoolCreateInfo ) == sizeof( VkQueryPoolCreateInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<QueryPoolCreateInfo>::value, "struct wrapper is not a standard layout!" );
-
- struct QueryPoolCreateInfoINTEL
- {
- VULKAN_HPP_CONSTEXPR QueryPoolCreateInfoINTEL( VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ = VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual ) VULKAN_HPP_NOEXCEPT
- : performanceCountersSampling( performanceCountersSampling_ )
- {}
-
- VULKAN_HPP_NAMESPACE::QueryPoolCreateInfoINTEL & operator=( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfoINTEL ) - offsetof( QueryPoolCreateInfoINTEL, pNext ) );
- return *this;
- }
-
- QueryPoolCreateInfoINTEL( VkQueryPoolCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
-
- QueryPoolCreateInfoINTEL& operator=( VkQueryPoolCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueryPoolCreateInfoINTEL const *>(&rhs);
- return *this;
- }
-
- QueryPoolCreateInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
- QueryPoolCreateInfoINTEL & setPerformanceCountersSampling( VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ ) VULKAN_HPP_NOEXCEPT
- {
- performanceCountersSampling = performanceCountersSampling_;
- return *this;
- }
-
- operator VkQueryPoolCreateInfoINTEL const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkQueryPoolCreateInfoINTEL*>( this );
- }
-
- operator VkQueryPoolCreateInfoINTEL &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkQueryPoolCreateInfoINTEL*>( this );
- }
- bool operator==( QueryPoolCreateInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( performanceCountersSampling == rhs.performanceCountersSampling );
- }
-
- bool operator!=( QueryPoolCreateInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolCreateInfoINTEL;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling = VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual;
+
};
- static_assert( sizeof( QueryPoolCreateInfoINTEL ) == sizeof( VkQueryPoolCreateInfoINTEL ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<QueryPoolCreateInfoINTEL>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( QueryPoolPerformanceQueryCreateInfoINTEL ) == sizeof( VkQueryPoolPerformanceQueryCreateInfoINTEL ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<QueryPoolPerformanceQueryCreateInfoINTEL>::value, "struct wrapper is not a standard layout!" );
- struct QueryPoolPerformanceCreateInfoKHR
+ template <>
+ struct CppType<StructureType, StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL>
{
- VULKAN_HPP_CONSTEXPR QueryPoolPerformanceCreateInfoKHR( uint32_t queueFamilyIndex_ = {},
- uint32_t counterIndexCount_ = {},
- const uint32_t* pCounterIndices_ = {} ) VULKAN_HPP_NOEXCEPT
- : queueFamilyIndex( queueFamilyIndex_ )
- , counterIndexCount( counterIndexCount_ )
- , pCounterIndices( pCounterIndices_ )
- {}
-
- VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR & operator=( VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR ) - offsetof( QueryPoolPerformanceCreateInfoKHR, pNext ) );
- return *this;
- }
-
- QueryPoolPerformanceCreateInfoKHR( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
-
- QueryPoolPerformanceCreateInfoKHR& operator=( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR const *>(&rhs);
- return *this;
- }
-
- QueryPoolPerformanceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
- QueryPoolPerformanceCreateInfoKHR & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
- {
- queueFamilyIndex = queueFamilyIndex_;
- return *this;
- }
-
- QueryPoolPerformanceCreateInfoKHR & setCounterIndexCount( uint32_t counterIndexCount_ ) VULKAN_HPP_NOEXCEPT
- {
- counterIndexCount = counterIndexCount_;
- return *this;
- }
-
- QueryPoolPerformanceCreateInfoKHR & setPCounterIndices( const uint32_t* pCounterIndices_ ) VULKAN_HPP_NOEXCEPT
- {
- pCounterIndices = pCounterIndices_;
- return *this;
- }
-
- operator VkQueryPoolPerformanceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR*>( this );
- }
-
- operator VkQueryPoolPerformanceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkQueryPoolPerformanceCreateInfoKHR*>( this );
- }
-
- bool operator==( QueryPoolPerformanceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( queueFamilyIndex == rhs.queueFamilyIndex )
- && ( counterIndexCount == rhs.counterIndexCount )
- && ( pCounterIndices == rhs.pCounterIndices );
- }
-
- bool operator!=( QueryPoolPerformanceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-
- public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolPerformanceCreateInfoKHR;
- const void* pNext = {};
- uint32_t queueFamilyIndex = {};
- uint32_t counterIndexCount = {};
- const uint32_t* pCounterIndices = {};
+ using Type = QueryPoolPerformanceQueryCreateInfoINTEL;
};
- static_assert( sizeof( QueryPoolPerformanceCreateInfoKHR ) == sizeof( VkQueryPoolPerformanceCreateInfoKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<QueryPoolPerformanceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+ using QueryPoolCreateInfoINTEL = QueryPoolPerformanceQueryCreateInfoINTEL;
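Aside (illustration only, not part of the diff): QueryPoolCreateInfoINTEL is renamed to QueryPoolPerformanceQueryCreateInfoINTEL, and the alias above keeps pre-rename code compiling unchanged. A one-line sketch, assuming vulkan.hpp is included:

    static_assert( std::is_same<VULKAN_HPP_NAMESPACE::QueryPoolCreateInfoINTEL,
                                VULKAN_HPP_NAMESPACE::QueryPoolPerformanceQueryCreateInfoINTEL>::value,
                   "the legacy INTEL name is now an alias for the renamed wrapper" );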
struct QueueFamilyCheckpointPropertiesNV
{
- QueueFamilyCheckpointPropertiesNV( VULKAN_HPP_NAMESPACE::PipelineStageFlags checkpointExecutionStageMask_ = {} ) VULKAN_HPP_NOEXCEPT
- : checkpointExecutionStageMask( checkpointExecutionStageMask_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyCheckpointPropertiesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointPropertiesNV(VULKAN_HPP_NAMESPACE::PipelineStageFlags checkpointExecutionStageMask_ = {}) VULKAN_HPP_NOEXCEPT
+ : checkpointExecutionStageMask( checkpointExecutionStageMask_ )
{}
- VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV & operator=( VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV ) - offsetof( QueueFamilyCheckpointPropertiesNV, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointPropertiesNV( QueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
QueueFamilyCheckpointPropertiesNV( VkQueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- QueueFamilyCheckpointPropertiesNV& operator=( VkQueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ QueueFamilyCheckpointPropertiesNV & operator=( VkQueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV const *>( &rhs );
return *this;
}
+ QueueFamilyCheckpointPropertiesNV & operator=( QueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( QueueFamilyCheckpointPropertiesNV ) );
+ return *this;
+ }
+
+
operator VkQueueFamilyCheckpointPropertiesNV const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkQueueFamilyCheckpointPropertiesNV*>( this );
@@ -55070,6 +75351,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkQueueFamilyCheckpointPropertiesNV*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( QueueFamilyCheckpointPropertiesNV const& ) const = default;
+#else
bool operator==( QueueFamilyCheckpointPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -55081,438 +75366,58 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyCheckpointPropertiesNV;
void* pNext = {};
VULKAN_HPP_NAMESPACE::PipelineStageFlags checkpointExecutionStageMask = {};
+
};
static_assert( sizeof( QueueFamilyCheckpointPropertiesNV ) == sizeof( VkQueueFamilyCheckpointPropertiesNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<QueueFamilyCheckpointPropertiesNV>::value, "struct wrapper is not a standard layout!" );
- struct QueueFamilyProperties
+ template <>
+ struct CppType<StructureType, StructureType::eQueueFamilyCheckpointPropertiesNV>
{
- QueueFamilyProperties( VULKAN_HPP_NAMESPACE::QueueFlags queueFlags_ = {},
- uint32_t queueCount_ = {},
- uint32_t timestampValidBits_ = {},
- VULKAN_HPP_NAMESPACE::Extent3D minImageTransferGranularity_ = {} ) VULKAN_HPP_NOEXCEPT
- : queueFlags( queueFlags_ )
- , queueCount( queueCount_ )
- , timestampValidBits( timestampValidBits_ )
- , minImageTransferGranularity( minImageTransferGranularity_ )
- {}
-
- QueueFamilyProperties( VkQueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
-
- QueueFamilyProperties& operator=( VkQueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyProperties const *>(&rhs);
- return *this;
- }
-
- operator VkQueueFamilyProperties const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkQueueFamilyProperties*>( this );
- }
-
- operator VkQueueFamilyProperties &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkQueueFamilyProperties*>( this );
- }
-
- bool operator==( QueueFamilyProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( queueFlags == rhs.queueFlags )
- && ( queueCount == rhs.queueCount )
- && ( timestampValidBits == rhs.timestampValidBits )
- && ( minImageTransferGranularity == rhs.minImageTransferGranularity );
- }
-
- bool operator!=( QueueFamilyProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-
- public:
- VULKAN_HPP_NAMESPACE::QueueFlags queueFlags = {};
- uint32_t queueCount = {};
- uint32_t timestampValidBits = {};
- VULKAN_HPP_NAMESPACE::Extent3D minImageTransferGranularity = {};
+ using Type = QueueFamilyCheckpointPropertiesNV;
};
- static_assert( sizeof( QueueFamilyProperties ) == sizeof( VkQueueFamilyProperties ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<QueueFamilyProperties>::value, "struct wrapper is not a standard layout!" );
-
- struct QueueFamilyProperties2
- {
- QueueFamilyProperties2( VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties_ = {} ) VULKAN_HPP_NOEXCEPT
- : queueFamilyProperties( queueFamilyProperties_ )
- {}
-
- VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 & operator=( VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 ) - offsetof( QueueFamilyProperties2, pNext ) );
- return *this;
- }
-
- QueueFamilyProperties2( VkQueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
-
- QueueFamilyProperties2& operator=( VkQueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 const *>(&rhs);
- return *this;
- }
-
- operator VkQueueFamilyProperties2 const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkQueueFamilyProperties2*>( this );
- }
-
- operator VkQueueFamilyProperties2 &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkQueueFamilyProperties2*>( this );
- }
-
- bool operator==( QueueFamilyProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( queueFamilyProperties == rhs.queueFamilyProperties );
- }
- bool operator!=( QueueFamilyProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-
- public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyProperties2;
- void* pNext = {};
- VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties = {};
- };
- static_assert( sizeof( QueueFamilyProperties2 ) == sizeof( VkQueueFamilyProperties2 ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<QueueFamilyProperties2>::value, "struct wrapper is not a standard layout!" );
-
- struct RayTracingShaderGroupCreateInfoNV
+ struct RenderPassAttachmentBeginInfo
{
- VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoNV( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeNV type_ = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeNV::eGeneral,
- uint32_t generalShader_ = {},
- uint32_t closestHitShader_ = {},
- uint32_t anyHitShader_ = {},
- uint32_t intersectionShader_ = {} ) VULKAN_HPP_NOEXCEPT
- : type( type_ )
- , generalShader( generalShader_ )
- , closestHitShader( closestHitShader_ )
- , anyHitShader( anyHitShader_ )
- , intersectionShader( intersectionShader_ )
- {}
-
- VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV ) - offsetof( RayTracingShaderGroupCreateInfoNV, pNext ) );
- return *this;
- }
-
- RayTracingShaderGroupCreateInfoNV( VkRayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
-
- RayTracingShaderGroupCreateInfoNV& operator=( VkRayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV const *>(&rhs);
- return *this;
- }
-
- RayTracingShaderGroupCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
- RayTracingShaderGroupCreateInfoNV & setType( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeNV type_ ) VULKAN_HPP_NOEXCEPT
- {
- type = type_;
- return *this;
- }
-
- RayTracingShaderGroupCreateInfoNV & setGeneralShader( uint32_t generalShader_ ) VULKAN_HPP_NOEXCEPT
- {
- generalShader = generalShader_;
- return *this;
- }
-
- RayTracingShaderGroupCreateInfoNV & setClosestHitShader( uint32_t closestHitShader_ ) VULKAN_HPP_NOEXCEPT
- {
- closestHitShader = closestHitShader_;
- return *this;
- }
-
- RayTracingShaderGroupCreateInfoNV & setAnyHitShader( uint32_t anyHitShader_ ) VULKAN_HPP_NOEXCEPT
- {
- anyHitShader = anyHitShader_;
- return *this;
- }
-
- RayTracingShaderGroupCreateInfoNV & setIntersectionShader( uint32_t intersectionShader_ ) VULKAN_HPP_NOEXCEPT
- {
- intersectionShader = intersectionShader_;
- return *this;
- }
-
- operator VkRayTracingShaderGroupCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkRayTracingShaderGroupCreateInfoNV*>( this );
- }
-
- operator VkRayTracingShaderGroupCreateInfoNV &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkRayTracingShaderGroupCreateInfoNV*>( this );
- }
-
- bool operator==( RayTracingShaderGroupCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( type == rhs.type )
- && ( generalShader == rhs.generalShader )
- && ( closestHitShader == rhs.closestHitShader )
- && ( anyHitShader == rhs.anyHitShader )
- && ( intersectionShader == rhs.intersectionShader );
- }
-
- bool operator!=( RayTracingShaderGroupCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-
- public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingShaderGroupCreateInfoNV;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeNV type = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeNV::eGeneral;
- uint32_t generalShader = {};
- uint32_t closestHitShader = {};
- uint32_t anyHitShader = {};
- uint32_t intersectionShader = {};
- };
- static_assert( sizeof( RayTracingShaderGroupCreateInfoNV ) == sizeof( VkRayTracingShaderGroupCreateInfoNV ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<RayTracingShaderGroupCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassAttachmentBeginInfo;
- struct RayTracingPipelineCreateInfoNV
- {
- VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {},
- uint32_t stageCount_ = {},
- const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ = {},
- uint32_t groupCount_ = {},
- const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV* pGroups_ = {},
- uint32_t maxRecursionDepth_ = {},
- VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {},
- VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {},
- int32_t basePipelineIndex_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , stageCount( stageCount_ )
- , pStages( pStages_ )
- , groupCount( groupCount_ )
- , pGroups( pGroups_ )
- , maxRecursionDepth( maxRecursionDepth_ )
- , layout( layout_ )
- , basePipelineHandle( basePipelineHandle_ )
- , basePipelineIndex( basePipelineIndex_ )
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR RenderPassAttachmentBeginInfo(uint32_t attachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageView* pAttachments_ = {}) VULKAN_HPP_NOEXCEPT
+ : attachmentCount( attachmentCount_ ), pAttachments( pAttachments_ )
{}
- VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV ) - offsetof( RayTracingPipelineCreateInfoNV, pNext ) );
- return *this;
- }
-
- RayTracingPipelineCreateInfoNV( VkRayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
-
- RayTracingPipelineCreateInfoNV& operator=( VkRayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const *>(&rhs);
- return *this;
- }
-
- RayTracingPipelineCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
- RayTracingPipelineCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
- {
- flags = flags_;
- return *this;
- }
-
- RayTracingPipelineCreateInfoNV & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT
- {
- stageCount = stageCount_;
- return *this;
- }
-
- RayTracingPipelineCreateInfoNV & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ ) VULKAN_HPP_NOEXCEPT
- {
- pStages = pStages_;
- return *this;
- }
-
- RayTracingPipelineCreateInfoNV & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT
- {
- groupCount = groupCount_;
- return *this;
- }
-
- RayTracingPipelineCreateInfoNV & setPGroups( const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV* pGroups_ ) VULKAN_HPP_NOEXCEPT
- {
- pGroups = pGroups_;
- return *this;
- }
-
- RayTracingPipelineCreateInfoNV & setMaxRecursionDepth( uint32_t maxRecursionDepth_ ) VULKAN_HPP_NOEXCEPT
- {
- maxRecursionDepth = maxRecursionDepth_;
- return *this;
- }
-
- RayTracingPipelineCreateInfoNV & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
- {
- layout = layout_;
- return *this;
- }
-
- RayTracingPipelineCreateInfoNV & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
- {
- basePipelineHandle = basePipelineHandle_;
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR RenderPassAttachmentBeginInfo( RenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- RayTracingPipelineCreateInfoNV & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
- {
- basePipelineIndex = basePipelineIndex_;
- return *this;
- }
-
- operator VkRayTracingPipelineCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( this );
- }
-
- operator VkRayTracingPipelineCreateInfoNV &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkRayTracingPipelineCreateInfoNV*>( this );
- }
-
- bool operator==( RayTracingPipelineCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( flags == rhs.flags )
- && ( stageCount == rhs.stageCount )
- && ( pStages == rhs.pStages )
- && ( groupCount == rhs.groupCount )
- && ( pGroups == rhs.pGroups )
- && ( maxRecursionDepth == rhs.maxRecursionDepth )
- && ( layout == rhs.layout )
- && ( basePipelineHandle == rhs.basePipelineHandle )
- && ( basePipelineIndex == rhs.basePipelineIndex );
- }
-
- bool operator!=( RayTracingPipelineCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-
- public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineCreateInfoNV;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {};
- uint32_t stageCount = {};
- const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages = {};
- uint32_t groupCount = {};
- const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV* pGroups = {};
- uint32_t maxRecursionDepth = {};
- VULKAN_HPP_NAMESPACE::PipelineLayout layout = {};
- VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {};
- int32_t basePipelineIndex = {};
- };
- static_assert( sizeof( RayTracingPipelineCreateInfoNV ) == sizeof( VkRayTracingPipelineCreateInfoNV ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<RayTracingPipelineCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
-
- struct RefreshCycleDurationGOOGLE
- {
- RefreshCycleDurationGOOGLE( uint64_t refreshDuration_ = {} ) VULKAN_HPP_NOEXCEPT
- : refreshDuration( refreshDuration_ )
- {}
-
- RefreshCycleDurationGOOGLE( VkRefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+ RenderPassAttachmentBeginInfo( VkRenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- RefreshCycleDurationGOOGLE& operator=( VkRefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE const *>(&rhs);
- return *this;
- }
-
- operator VkRefreshCycleDurationGOOGLE const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkRefreshCycleDurationGOOGLE*>( this );
- }
-
- operator VkRefreshCycleDurationGOOGLE &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkRefreshCycleDurationGOOGLE*>( this );
- }
-
- bool operator==( RefreshCycleDurationGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( refreshDuration == rhs.refreshDuration );
- }
-
- bool operator!=( RefreshCycleDurationGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-
- public:
- uint64_t refreshDuration = {};
- };
- static_assert( sizeof( RefreshCycleDurationGOOGLE ) == sizeof( VkRefreshCycleDurationGOOGLE ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<RefreshCycleDurationGOOGLE>::value, "struct wrapper is not a standard layout!" );
-
- struct RenderPassAttachmentBeginInfo
- {
- VULKAN_HPP_CONSTEXPR RenderPassAttachmentBeginInfo( uint32_t attachmentCount_ = {},
- const VULKAN_HPP_NAMESPACE::ImageView* pAttachments_ = {} ) VULKAN_HPP_NOEXCEPT
- : attachmentCount( attachmentCount_ )
- , pAttachments( pAttachments_ )
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ RenderPassAttachmentBeginInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageView> const & attachments_ )
+ : attachmentCount( static_cast<uint32_t>( attachments_.size() ) ), pAttachments( attachments_.data() )
{}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo & operator=( VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ RenderPassAttachmentBeginInfo & operator=( VkRenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo ) - offsetof( RenderPassAttachmentBeginInfo, pNext ) );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo const *>( &rhs );
return *this;
}
- RenderPassAttachmentBeginInfo( VkRenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
-
- RenderPassAttachmentBeginInfo& operator=( VkRenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ RenderPassAttachmentBeginInfo & operator=( RenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( RenderPassAttachmentBeginInfo ) );
return *this;
}
@@ -55534,6 +75439,16 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ RenderPassAttachmentBeginInfo & setAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageView> const & attachments_ ) VULKAN_HPP_NOEXCEPT
+ {
+ attachmentCount = static_cast<uint32_t>( attachments_.size() );
+ pAttachments = attachments_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
operator VkRenderPassAttachmentBeginInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRenderPassAttachmentBeginInfo*>( this );
@@ -55544,6 +75459,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkRenderPassAttachmentBeginInfo*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( RenderPassAttachmentBeginInfo const& ) const = default;
+#else
bool operator==( RenderPassAttachmentBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -55556,959 +75475,54 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassAttachmentBeginInfo;
const void* pNext = {};
uint32_t attachmentCount = {};
const VULKAN_HPP_NAMESPACE::ImageView* pAttachments = {};
+
};
static_assert( sizeof( RenderPassAttachmentBeginInfo ) == sizeof( VkRenderPassAttachmentBeginInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<RenderPassAttachmentBeginInfo>::value, "struct wrapper is not a standard layout!" );
- struct RenderPassBeginInfo
- {
- VULKAN_HPP_CONSTEXPR RenderPassBeginInfo( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {},
- VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {},
- VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {},
- uint32_t clearValueCount_ = {},
- const VULKAN_HPP_NAMESPACE::ClearValue* pClearValues_ = {} ) VULKAN_HPP_NOEXCEPT
- : renderPass( renderPass_ )
- , framebuffer( framebuffer_ )
- , renderArea( renderArea_ )
- , clearValueCount( clearValueCount_ )
- , pClearValues( pClearValues_ )
- {}
-
- VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & operator=( VULKAN_HPP_NAMESPACE::RenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::RenderPassBeginInfo ) - offsetof( RenderPassBeginInfo, pNext ) );
- return *this;
- }
-
- RenderPassBeginInfo( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
-
- RenderPassBeginInfo& operator=( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassBeginInfo const *>(&rhs);
- return *this;
- }
-
- RenderPassBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
- RenderPassBeginInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
- {
- renderPass = renderPass_;
- return *this;
- }
-
- RenderPassBeginInfo & setFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ ) VULKAN_HPP_NOEXCEPT
- {
- framebuffer = framebuffer_;
- return *this;
- }
-
- RenderPassBeginInfo & setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D renderArea_ ) VULKAN_HPP_NOEXCEPT
- {
- renderArea = renderArea_;
- return *this;
- }
-
- RenderPassBeginInfo & setClearValueCount( uint32_t clearValueCount_ ) VULKAN_HPP_NOEXCEPT
- {
- clearValueCount = clearValueCount_;
- return *this;
- }
-
- RenderPassBeginInfo & setPClearValues( const VULKAN_HPP_NAMESPACE::ClearValue* pClearValues_ ) VULKAN_HPP_NOEXCEPT
- {
- pClearValues = pClearValues_;
- return *this;
- }
-
- operator VkRenderPassBeginInfo const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkRenderPassBeginInfo*>( this );
- }
-
- operator VkRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkRenderPassBeginInfo*>( this );
- }
-
- bool operator==( RenderPassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( renderPass == rhs.renderPass )
- && ( framebuffer == rhs.framebuffer )
- && ( renderArea == rhs.renderArea )
- && ( clearValueCount == rhs.clearValueCount )
- && ( pClearValues == rhs.pClearValues );
- }
-
- bool operator!=( RenderPassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-
- public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassBeginInfo;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::RenderPass renderPass = {};
- VULKAN_HPP_NAMESPACE::Framebuffer framebuffer = {};
- VULKAN_HPP_NAMESPACE::Rect2D renderArea = {};
- uint32_t clearValueCount = {};
- const VULKAN_HPP_NAMESPACE::ClearValue* pClearValues = {};
- };
- static_assert( sizeof( RenderPassBeginInfo ) == sizeof( VkRenderPassBeginInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<RenderPassBeginInfo>::value, "struct wrapper is not a standard layout!" );
-
- struct SubpassDescription
- {
- VULKAN_HPP_CONSTEXPR SubpassDescription( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = {},
- VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics,
- uint32_t inputAttachmentCount_ = {},
- const VULKAN_HPP_NAMESPACE::AttachmentReference* pInputAttachments_ = {},
- uint32_t colorAttachmentCount_ = {},
- const VULKAN_HPP_NAMESPACE::AttachmentReference* pColorAttachments_ = {},
- const VULKAN_HPP_NAMESPACE::AttachmentReference* pResolveAttachments_ = {},
- const VULKAN_HPP_NAMESPACE::AttachmentReference* pDepthStencilAttachment_ = {},
- uint32_t preserveAttachmentCount_ = {},
- const uint32_t* pPreserveAttachments_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , pipelineBindPoint( pipelineBindPoint_ )
- , inputAttachmentCount( inputAttachmentCount_ )
- , pInputAttachments( pInputAttachments_ )
- , colorAttachmentCount( colorAttachmentCount_ )
- , pColorAttachments( pColorAttachments_ )
- , pResolveAttachments( pResolveAttachments_ )
- , pDepthStencilAttachment( pDepthStencilAttachment_ )
- , preserveAttachmentCount( preserveAttachmentCount_ )
- , pPreserveAttachments( pPreserveAttachments_ )
- {}
-
- SubpassDescription( VkSubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
-
- SubpassDescription& operator=( VkSubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDescription const *>(&rhs);
- return *this;
- }
-
- SubpassDescription & setFlags( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
- {
- flags = flags_;
- return *this;
- }
-
- SubpassDescription & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
- {
- pipelineBindPoint = pipelineBindPoint_;
- return *this;
- }
-
- SubpassDescription & setInputAttachmentCount( uint32_t inputAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
- {
- inputAttachmentCount = inputAttachmentCount_;
- return *this;
- }
-
- SubpassDescription & setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference* pInputAttachments_ ) VULKAN_HPP_NOEXCEPT
- {
- pInputAttachments = pInputAttachments_;
- return *this;
- }
-
- SubpassDescription & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
- {
- colorAttachmentCount = colorAttachmentCount_;
- return *this;
- }
-
- SubpassDescription & setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference* pColorAttachments_ ) VULKAN_HPP_NOEXCEPT
- {
- pColorAttachments = pColorAttachments_;
- return *this;
- }
-
- SubpassDescription & setPResolveAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference* pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT
- {
- pResolveAttachments = pResolveAttachments_;
- return *this;
- }
-
- SubpassDescription & setPDepthStencilAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference* pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT
- {
- pDepthStencilAttachment = pDepthStencilAttachment_;
- return *this;
- }
-
- SubpassDescription & setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
- {
- preserveAttachmentCount = preserveAttachmentCount_;
- return *this;
- }
-
- SubpassDescription & setPPreserveAttachments( const uint32_t* pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT
- {
- pPreserveAttachments = pPreserveAttachments_;
- return *this;
- }
-
- operator VkSubpassDescription const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkSubpassDescription*>( this );
- }
-
- operator VkSubpassDescription &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkSubpassDescription*>( this );
- }
-
- bool operator==( SubpassDescription const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( flags == rhs.flags )
- && ( pipelineBindPoint == rhs.pipelineBindPoint )
- && ( inputAttachmentCount == rhs.inputAttachmentCount )
- && ( pInputAttachments == rhs.pInputAttachments )
- && ( colorAttachmentCount == rhs.colorAttachmentCount )
- && ( pColorAttachments == rhs.pColorAttachments )
- && ( pResolveAttachments == rhs.pResolveAttachments )
- && ( pDepthStencilAttachment == rhs.pDepthStencilAttachment )
- && ( preserveAttachmentCount == rhs.preserveAttachmentCount )
- && ( pPreserveAttachments == rhs.pPreserveAttachments );
- }
-
- bool operator!=( SubpassDescription const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-
- public:
- VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags = {};
- VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
- uint32_t inputAttachmentCount = {};
- const VULKAN_HPP_NAMESPACE::AttachmentReference* pInputAttachments = {};
- uint32_t colorAttachmentCount = {};
- const VULKAN_HPP_NAMESPACE::AttachmentReference* pColorAttachments = {};
- const VULKAN_HPP_NAMESPACE::AttachmentReference* pResolveAttachments = {};
- const VULKAN_HPP_NAMESPACE::AttachmentReference* pDepthStencilAttachment = {};
- uint32_t preserveAttachmentCount = {};
- const uint32_t* pPreserveAttachments = {};
- };
- static_assert( sizeof( SubpassDescription ) == sizeof( VkSubpassDescription ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<SubpassDescription>::value, "struct wrapper is not a standard layout!" );
-
- struct SubpassDependency
- {
- VULKAN_HPP_CONSTEXPR SubpassDependency( uint32_t srcSubpass_ = {},
- uint32_t dstSubpass_ = {},
- VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ = {},
- VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ = {},
- VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {},
- VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {},
- VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {} ) VULKAN_HPP_NOEXCEPT
- : srcSubpass( srcSubpass_ )
- , dstSubpass( dstSubpass_ )
- , srcStageMask( srcStageMask_ )
- , dstStageMask( dstStageMask_ )
- , srcAccessMask( srcAccessMask_ )
- , dstAccessMask( dstAccessMask_ )
- , dependencyFlags( dependencyFlags_ )
- {}
-
- SubpassDependency( VkSubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
-
- SubpassDependency& operator=( VkSubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDependency const *>(&rhs);
- return *this;
- }
-
- SubpassDependency & setSrcSubpass( uint32_t srcSubpass_ ) VULKAN_HPP_NOEXCEPT
- {
- srcSubpass = srcSubpass_;
- return *this;
- }
-
- SubpassDependency & setDstSubpass( uint32_t dstSubpass_ ) VULKAN_HPP_NOEXCEPT
- {
- dstSubpass = dstSubpass_;
- return *this;
- }
-
- SubpassDependency & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ ) VULKAN_HPP_NOEXCEPT
- {
- srcStageMask = srcStageMask_;
- return *this;
- }
-
- SubpassDependency & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ ) VULKAN_HPP_NOEXCEPT
- {
- dstStageMask = dstStageMask_;
- return *this;
- }
-
- SubpassDependency & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
- {
- srcAccessMask = srcAccessMask_;
- return *this;
- }
-
- SubpassDependency & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
- {
- dstAccessMask = dstAccessMask_;
- return *this;
- }
-
- SubpassDependency & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT
- {
- dependencyFlags = dependencyFlags_;
- return *this;
- }
-
- operator VkSubpassDependency const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkSubpassDependency*>( this );
- }
-
- operator VkSubpassDependency &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkSubpassDependency*>( this );
- }
-
- bool operator==( SubpassDependency const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( srcSubpass == rhs.srcSubpass )
- && ( dstSubpass == rhs.dstSubpass )
- && ( srcStageMask == rhs.srcStageMask )
- && ( dstStageMask == rhs.dstStageMask )
- && ( srcAccessMask == rhs.srcAccessMask )
- && ( dstAccessMask == rhs.dstAccessMask )
- && ( dependencyFlags == rhs.dependencyFlags );
- }
-
- bool operator!=( SubpassDependency const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-
- public:
- uint32_t srcSubpass = {};
- uint32_t dstSubpass = {};
- VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask = {};
- VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask = {};
- VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
- VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
- VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {};
- };
- static_assert( sizeof( SubpassDependency ) == sizeof( VkSubpassDependency ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<SubpassDependency>::value, "struct wrapper is not a standard layout!" );
-
- struct RenderPassCreateInfo
+ template <>
+ struct CppType<StructureType, StructureType::eRenderPassAttachmentBeginInfo>
{
- VULKAN_HPP_CONSTEXPR RenderPassCreateInfo( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = {},
- uint32_t attachmentCount_ = {},
- const VULKAN_HPP_NAMESPACE::AttachmentDescription* pAttachments_ = {},
- uint32_t subpassCount_ = {},
- const VULKAN_HPP_NAMESPACE::SubpassDescription* pSubpasses_ = {},
- uint32_t dependencyCount_ = {},
- const VULKAN_HPP_NAMESPACE::SubpassDependency* pDependencies_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , attachmentCount( attachmentCount_ )
- , pAttachments( pAttachments_ )
- , subpassCount( subpassCount_ )
- , pSubpasses( pSubpasses_ )
- , dependencyCount( dependencyCount_ )
- , pDependencies( pDependencies_ )
- {}
-
- VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & operator=( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo ) - offsetof( RenderPassCreateInfo, pNext ) );
- return *this;
- }
-
- RenderPassCreateInfo( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
-
- RenderPassCreateInfo& operator=( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const *>(&rhs);
- return *this;
- }
-
- RenderPassCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
- RenderPassCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
- {
- flags = flags_;
- return *this;
- }
-
- RenderPassCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
- {
- attachmentCount = attachmentCount_;
- return *this;
- }
-
- RenderPassCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription* pAttachments_ ) VULKAN_HPP_NOEXCEPT
- {
- pAttachments = pAttachments_;
- return *this;
- }
-
- RenderPassCreateInfo & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT
- {
- subpassCount = subpassCount_;
- return *this;
- }
-
- RenderPassCreateInfo & setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription* pSubpasses_ ) VULKAN_HPP_NOEXCEPT
- {
- pSubpasses = pSubpasses_;
- return *this;
- }
-
- RenderPassCreateInfo & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT
- {
- dependencyCount = dependencyCount_;
- return *this;
- }
-
- RenderPassCreateInfo & setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency* pDependencies_ ) VULKAN_HPP_NOEXCEPT
- {
- pDependencies = pDependencies_;
- return *this;
- }
-
- operator VkRenderPassCreateInfo const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkRenderPassCreateInfo*>( this );
- }
-
- operator VkRenderPassCreateInfo &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkRenderPassCreateInfo*>( this );
- }
-
- bool operator==( RenderPassCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( flags == rhs.flags )
- && ( attachmentCount == rhs.attachmentCount )
- && ( pAttachments == rhs.pAttachments )
- && ( subpassCount == rhs.subpassCount )
- && ( pSubpasses == rhs.pSubpasses )
- && ( dependencyCount == rhs.dependencyCount )
- && ( pDependencies == rhs.pDependencies );
- }
-
- bool operator!=( RenderPassCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-
- public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreateInfo;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags = {};
- uint32_t attachmentCount = {};
- const VULKAN_HPP_NAMESPACE::AttachmentDescription* pAttachments = {};
- uint32_t subpassCount = {};
- const VULKAN_HPP_NAMESPACE::SubpassDescription* pSubpasses = {};
- uint32_t dependencyCount = {};
- const VULKAN_HPP_NAMESPACE::SubpassDependency* pDependencies = {};
+ using Type = RenderPassAttachmentBeginInfo;
};
- static_assert( sizeof( RenderPassCreateInfo ) == sizeof( VkRenderPassCreateInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<RenderPassCreateInfo>::value, "struct wrapper is not a standard layout!" );
+ using RenderPassAttachmentBeginInfoKHR = RenderPassAttachmentBeginInfo;
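
The CppType specializations introduced throughout this regenerated header map a StructureType enumerant back to its wrapper struct. A minimal compile-time sketch of what that trait gives a caller (illustrative only; vk is the default VULKAN_HPP_NAMESPACE):

#include <type_traits>
#include <vulkan/vulkan.hpp>

// The trait resolves the sType enumerant to its C++ wrapper type, so generic
// pNext-chain code can go from an enum value to a struct type without a switch.
static_assert(
    std::is_same<
        vk::CppType<vk::StructureType, vk::StructureType::eRenderPassAttachmentBeginInfo>::Type,
        vk::RenderPassAttachmentBeginInfo>::value,
    "CppType<StructureType, ...> should name the matching wrapper struct" );
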
- struct SubpassDescription2
+ struct RenderPassFragmentDensityMapCreateInfoEXT
{
- VULKAN_HPP_CONSTEXPR SubpassDescription2( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = {},
- VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics,
- uint32_t viewMask_ = {},
- uint32_t inputAttachmentCount_ = {},
- const VULKAN_HPP_NAMESPACE::AttachmentReference2* pInputAttachments_ = {},
- uint32_t colorAttachmentCount_ = {},
- const VULKAN_HPP_NAMESPACE::AttachmentReference2* pColorAttachments_ = {},
- const VULKAN_HPP_NAMESPACE::AttachmentReference2* pResolveAttachments_ = {},
- const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilAttachment_ = {},
- uint32_t preserveAttachmentCount_ = {},
- const uint32_t* pPreserveAttachments_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , pipelineBindPoint( pipelineBindPoint_ )
- , viewMask( viewMask_ )
- , inputAttachmentCount( inputAttachmentCount_ )
- , pInputAttachments( pInputAttachments_ )
- , colorAttachmentCount( colorAttachmentCount_ )
- , pColorAttachments( pColorAttachments_ )
- , pResolveAttachments( pResolveAttachments_ )
- , pDepthStencilAttachment( pDepthStencilAttachment_ )
- , preserveAttachmentCount( preserveAttachmentCount_ )
- , pPreserveAttachments( pPreserveAttachments_ )
- {}
-
- VULKAN_HPP_NAMESPACE::SubpassDescription2 & operator=( VULKAN_HPP_NAMESPACE::SubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SubpassDescription2 ) - offsetof( SubpassDescription2, pNext ) );
- return *this;
- }
-
- SubpassDescription2( VkSubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
-
- SubpassDescription2& operator=( VkSubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDescription2 const *>(&rhs);
- return *this;
- }
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassFragmentDensityMapCreateInfoEXT;
- SubpassDescription2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
- SubpassDescription2 & setFlags( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
- {
- flags = flags_;
- return *this;
- }
-
- SubpassDescription2 & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
- {
- pipelineBindPoint = pipelineBindPoint_;
- return *this;
- }
-
- SubpassDescription2 & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT
- {
- viewMask = viewMask_;
- return *this;
- }
-
- SubpassDescription2 & setInputAttachmentCount( uint32_t inputAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
- {
- inputAttachmentCount = inputAttachmentCount_;
- return *this;
- }
-
- SubpassDescription2 & setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2* pInputAttachments_ ) VULKAN_HPP_NOEXCEPT
- {
- pInputAttachments = pInputAttachments_;
- return *this;
- }
-
- SubpassDescription2 & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
- {
- colorAttachmentCount = colorAttachmentCount_;
- return *this;
- }
-
- SubpassDescription2 & setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2* pColorAttachments_ ) VULKAN_HPP_NOEXCEPT
- {
- pColorAttachments = pColorAttachments_;
- return *this;
- }
-
- SubpassDescription2 & setPResolveAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2* pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT
- {
- pResolveAttachments = pResolveAttachments_;
- return *this;
- }
-
- SubpassDescription2 & setPDepthStencilAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT
- {
- pDepthStencilAttachment = pDepthStencilAttachment_;
- return *this;
- }
-
- SubpassDescription2 & setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
- {
- preserveAttachmentCount = preserveAttachmentCount_;
- return *this;
- }
-
- SubpassDescription2 & setPPreserveAttachments( const uint32_t* pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT
- {
- pPreserveAttachments = pPreserveAttachments_;
- return *this;
- }
-
- operator VkSubpassDescription2 const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkSubpassDescription2*>( this );
- }
-
- operator VkSubpassDescription2 &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkSubpassDescription2*>( this );
- }
-
- bool operator==( SubpassDescription2 const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( flags == rhs.flags )
- && ( pipelineBindPoint == rhs.pipelineBindPoint )
- && ( viewMask == rhs.viewMask )
- && ( inputAttachmentCount == rhs.inputAttachmentCount )
- && ( pInputAttachments == rhs.pInputAttachments )
- && ( colorAttachmentCount == rhs.colorAttachmentCount )
- && ( pColorAttachments == rhs.pColorAttachments )
- && ( pResolveAttachments == rhs.pResolveAttachments )
- && ( pDepthStencilAttachment == rhs.pDepthStencilAttachment )
- && ( preserveAttachmentCount == rhs.preserveAttachmentCount )
- && ( pPreserveAttachments == rhs.pPreserveAttachments );
- }
-
- bool operator!=( SubpassDescription2 const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-
- public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDescription2;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags = {};
- VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
- uint32_t viewMask = {};
- uint32_t inputAttachmentCount = {};
- const VULKAN_HPP_NAMESPACE::AttachmentReference2* pInputAttachments = {};
- uint32_t colorAttachmentCount = {};
- const VULKAN_HPP_NAMESPACE::AttachmentReference2* pColorAttachments = {};
- const VULKAN_HPP_NAMESPACE::AttachmentReference2* pResolveAttachments = {};
- const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilAttachment = {};
- uint32_t preserveAttachmentCount = {};
- const uint32_t* pPreserveAttachments = {};
- };
- static_assert( sizeof( SubpassDescription2 ) == sizeof( VkSubpassDescription2 ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<SubpassDescription2>::value, "struct wrapper is not a standard layout!" );
-
- struct SubpassDependency2
- {
- VULKAN_HPP_CONSTEXPR SubpassDependency2( uint32_t srcSubpass_ = {},
- uint32_t dstSubpass_ = {},
- VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ = {},
- VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ = {},
- VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {},
- VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {},
- VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {},
- int32_t viewOffset_ = {} ) VULKAN_HPP_NOEXCEPT
- : srcSubpass( srcSubpass_ )
- , dstSubpass( dstSubpass_ )
- , srcStageMask( srcStageMask_ )
- , dstStageMask( dstStageMask_ )
- , srcAccessMask( srcAccessMask_ )
- , dstAccessMask( dstAccessMask_ )
- , dependencyFlags( dependencyFlags_ )
- , viewOffset( viewOffset_ )
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapCreateInfoEXT(VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment_ = {}) VULKAN_HPP_NOEXCEPT
+ : fragmentDensityMapAttachment( fragmentDensityMapAttachment_ )
{}
- VULKAN_HPP_NAMESPACE::SubpassDependency2 & operator=( VULKAN_HPP_NAMESPACE::SubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SubpassDependency2 ) - offsetof( SubpassDependency2, pNext ) );
- return *this;
- }
-
- SubpassDependency2( VkSubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
-
- SubpassDependency2& operator=( VkSubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDependency2 const *>(&rhs);
- return *this;
- }
-
- SubpassDependency2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
- SubpassDependency2 & setSrcSubpass( uint32_t srcSubpass_ ) VULKAN_HPP_NOEXCEPT
- {
- srcSubpass = srcSubpass_;
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapCreateInfoEXT( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- SubpassDependency2 & setDstSubpass( uint32_t dstSubpass_ ) VULKAN_HPP_NOEXCEPT
- {
- dstSubpass = dstSubpass_;
- return *this;
- }
-
- SubpassDependency2 & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ ) VULKAN_HPP_NOEXCEPT
- {
- srcStageMask = srcStageMask_;
- return *this;
- }
-
- SubpassDependency2 & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ ) VULKAN_HPP_NOEXCEPT
- {
- dstStageMask = dstStageMask_;
- return *this;
- }
-
- SubpassDependency2 & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
- {
- srcAccessMask = srcAccessMask_;
- return *this;
- }
-
- SubpassDependency2 & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
- {
- dstAccessMask = dstAccessMask_;
- return *this;
- }
-
- SubpassDependency2 & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT
- {
- dependencyFlags = dependencyFlags_;
- return *this;
- }
-
- SubpassDependency2 & setViewOffset( int32_t viewOffset_ ) VULKAN_HPP_NOEXCEPT
- {
- viewOffset = viewOffset_;
- return *this;
- }
-
- operator VkSubpassDependency2 const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkSubpassDependency2*>( this );
- }
-
- operator VkSubpassDependency2 &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkSubpassDependency2*>( this );
- }
-
- bool operator==( SubpassDependency2 const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( srcSubpass == rhs.srcSubpass )
- && ( dstSubpass == rhs.dstSubpass )
- && ( srcStageMask == rhs.srcStageMask )
- && ( dstStageMask == rhs.dstStageMask )
- && ( srcAccessMask == rhs.srcAccessMask )
- && ( dstAccessMask == rhs.dstAccessMask )
- && ( dependencyFlags == rhs.dependencyFlags )
- && ( viewOffset == rhs.viewOffset );
- }
-
- bool operator!=( SubpassDependency2 const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-
- public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDependency2;
- const void* pNext = {};
- uint32_t srcSubpass = {};
- uint32_t dstSubpass = {};
- VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask = {};
- VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask = {};
- VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
- VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
- VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {};
- int32_t viewOffset = {};
- };
- static_assert( sizeof( SubpassDependency2 ) == sizeof( VkSubpassDependency2 ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<SubpassDependency2>::value, "struct wrapper is not a standard layout!" );
-
- struct RenderPassCreateInfo2
- {
- VULKAN_HPP_CONSTEXPR RenderPassCreateInfo2( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = {},
- uint32_t attachmentCount_ = {},
- const VULKAN_HPP_NAMESPACE::AttachmentDescription2* pAttachments_ = {},
- uint32_t subpassCount_ = {},
- const VULKAN_HPP_NAMESPACE::SubpassDescription2* pSubpasses_ = {},
- uint32_t dependencyCount_ = {},
- const VULKAN_HPP_NAMESPACE::SubpassDependency2* pDependencies_ = {},
- uint32_t correlatedViewMaskCount_ = {},
- const uint32_t* pCorrelatedViewMasks_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , attachmentCount( attachmentCount_ )
- , pAttachments( pAttachments_ )
- , subpassCount( subpassCount_ )
- , pSubpasses( pSubpasses_ )
- , dependencyCount( dependencyCount_ )
- , pDependencies( pDependencies_ )
- , correlatedViewMaskCount( correlatedViewMaskCount_ )
- , pCorrelatedViewMasks( pCorrelatedViewMasks_ )
- {}
-
- VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & operator=( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 ) - offsetof( RenderPassCreateInfo2, pNext ) );
- return *this;
- }
-
- RenderPassCreateInfo2( VkRenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ RenderPassFragmentDensityMapCreateInfoEXT( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- RenderPassCreateInfo2& operator=( VkRenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const *>(&rhs);
- return *this;
- }
-
- RenderPassCreateInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
- RenderPassCreateInfo2 & setFlags( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
- {
- flags = flags_;
- return *this;
- }
-
- RenderPassCreateInfo2 & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
- {
- attachmentCount = attachmentCount_;
- return *this;
- }
-
- RenderPassCreateInfo2 & setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription2* pAttachments_ ) VULKAN_HPP_NOEXCEPT
- {
- pAttachments = pAttachments_;
- return *this;
- }
-
- RenderPassCreateInfo2 & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT
- {
- subpassCount = subpassCount_;
- return *this;
- }
-
- RenderPassCreateInfo2 & setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription2* pSubpasses_ ) VULKAN_HPP_NOEXCEPT
- {
- pSubpasses = pSubpasses_;
- return *this;
- }
-
- RenderPassCreateInfo2 & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT
- {
- dependencyCount = dependencyCount_;
- return *this;
- }
-
- RenderPassCreateInfo2 & setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency2* pDependencies_ ) VULKAN_HPP_NOEXCEPT
- {
- pDependencies = pDependencies_;
- return *this;
- }
-
- RenderPassCreateInfo2 & setCorrelatedViewMaskCount( uint32_t correlatedViewMaskCount_ ) VULKAN_HPP_NOEXCEPT
- {
- correlatedViewMaskCount = correlatedViewMaskCount_;
- return *this;
- }
-
- RenderPassCreateInfo2 & setPCorrelatedViewMasks( const uint32_t* pCorrelatedViewMasks_ ) VULKAN_HPP_NOEXCEPT
- {
- pCorrelatedViewMasks = pCorrelatedViewMasks_;
- return *this;
- }
-
- operator VkRenderPassCreateInfo2 const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkRenderPassCreateInfo2*>( this );
- }
-
- operator VkRenderPassCreateInfo2 &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkRenderPassCreateInfo2*>( this );
- }
-
- bool operator==( RenderPassCreateInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( flags == rhs.flags )
- && ( attachmentCount == rhs.attachmentCount )
- && ( pAttachments == rhs.pAttachments )
- && ( subpassCount == rhs.subpassCount )
- && ( pSubpasses == rhs.pSubpasses )
- && ( dependencyCount == rhs.dependencyCount )
- && ( pDependencies == rhs.pDependencies )
- && ( correlatedViewMaskCount == rhs.correlatedViewMaskCount )
- && ( pCorrelatedViewMasks == rhs.pCorrelatedViewMasks );
- }
-
- bool operator!=( RenderPassCreateInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+ RenderPassFragmentDensityMapCreateInfoEXT & operator=( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- return !operator==( rhs );
- }
-
- public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreateInfo2;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags = {};
- uint32_t attachmentCount = {};
- const VULKAN_HPP_NAMESPACE::AttachmentDescription2* pAttachments = {};
- uint32_t subpassCount = {};
- const VULKAN_HPP_NAMESPACE::SubpassDescription2* pSubpasses = {};
- uint32_t dependencyCount = {};
- const VULKAN_HPP_NAMESPACE::SubpassDependency2* pDependencies = {};
- uint32_t correlatedViewMaskCount = {};
- const uint32_t* pCorrelatedViewMasks = {};
- };
- static_assert( sizeof( RenderPassCreateInfo2 ) == sizeof( VkRenderPassCreateInfo2 ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<RenderPassCreateInfo2>::value, "struct wrapper is not a standard layout!" );
-
- struct RenderPassFragmentDensityMapCreateInfoEXT
- {
- VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapCreateInfoEXT( VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment_ = {} ) VULKAN_HPP_NOEXCEPT
- : fragmentDensityMapAttachment( fragmentDensityMapAttachment_ )
- {}
-
- VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT ) - offsetof( RenderPassFragmentDensityMapCreateInfoEXT, pNext ) );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT const *>( &rhs );
return *this;
}
- RenderPassFragmentDensityMapCreateInfoEXT( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ RenderPassFragmentDensityMapCreateInfoEXT & operator=( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
- }
-
- RenderPassFragmentDensityMapCreateInfoEXT& operator=( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( RenderPassFragmentDensityMapCreateInfoEXT ) );
return *this;
}
@@ -56518,12 +75532,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- RenderPassFragmentDensityMapCreateInfoEXT & setFragmentDensityMapAttachment( VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment_ ) VULKAN_HPP_NOEXCEPT
+ RenderPassFragmentDensityMapCreateInfoEXT & setFragmentDensityMapAttachment( VULKAN_HPP_NAMESPACE::AttachmentReference const & fragmentDensityMapAttachment_ ) VULKAN_HPP_NOEXCEPT
{
fragmentDensityMapAttachment = fragmentDensityMapAttachment_;
return *this;
}
+
operator VkRenderPassFragmentDensityMapCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRenderPassFragmentDensityMapCreateInfoEXT*>( this );
@@ -56534,6 +75549,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkRenderPassFragmentDensityMapCreateInfoEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( RenderPassFragmentDensityMapCreateInfoEXT const& ) const = default;
+#else
bool operator==( RenderPassFragmentDensityMapCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -56545,37 +75564,58 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassFragmentDensityMapCreateInfoEXT;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment = {};
+
};
static_assert( sizeof( RenderPassFragmentDensityMapCreateInfoEXT ) == sizeof( VkRenderPassFragmentDensityMapCreateInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<RenderPassFragmentDensityMapCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eRenderPassFragmentDensityMapCreateInfoEXT>
+ {
+ using Type = RenderPassFragmentDensityMapCreateInfoEXT;
+ };
+
struct RenderPassInputAttachmentAspectCreateInfo
{
- VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo( uint32_t aspectReferenceCount_ = {},
- const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference* pAspectReferences_ = {} ) VULKAN_HPP_NOEXCEPT
- : aspectReferenceCount( aspectReferenceCount_ )
- , pAspectReferences( pAspectReferences_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassInputAttachmentAspectCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo(uint32_t aspectReferenceCount_ = {}, const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference* pAspectReferences_ = {}) VULKAN_HPP_NOEXCEPT
+ : aspectReferenceCount( aspectReferenceCount_ ), pAspectReferences( pAspectReferences_ )
{}
- VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo & operator=( VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo ) - offsetof( RenderPassInputAttachmentAspectCreateInfo, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo( RenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RenderPassInputAttachmentAspectCreateInfo( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- RenderPassInputAttachmentAspectCreateInfo& operator=( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ RenderPassInputAttachmentAspectCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference> const & aspectReferences_ )
+ : aspectReferenceCount( static_cast<uint32_t>( aspectReferences_.size() ) ), pAspectReferences( aspectReferences_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ RenderPassInputAttachmentAspectCreateInfo & operator=( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo const *>( &rhs );
+ return *this;
+ }
+
+ RenderPassInputAttachmentAspectCreateInfo & operator=( RenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( RenderPassInputAttachmentAspectCreateInfo ) );
return *this;
}
@@ -56597,6 +75637,16 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ RenderPassInputAttachmentAspectCreateInfo & setAspectReferences( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference> const & aspectReferences_ ) VULKAN_HPP_NOEXCEPT
+ {
+ aspectReferenceCount = static_cast<uint32_t>( aspectReferences_.size() );
+ pAspectReferences = aspectReferences_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
operator VkRenderPassInputAttachmentAspectCreateInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRenderPassInputAttachmentAspectCreateInfo*>( this );
@@ -56607,6 +75657,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkRenderPassInputAttachmentAspectCreateInfo*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( RenderPassInputAttachmentAspectCreateInfo const& ) const = default;
+#else
bool operator==( RenderPassInputAttachmentAspectCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -56619,46 +75673,60 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassInputAttachmentAspectCreateInfo;
const void* pNext = {};
uint32_t aspectReferenceCount = {};
const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference* pAspectReferences = {};
+
};
static_assert( sizeof( RenderPassInputAttachmentAspectCreateInfo ) == sizeof( VkRenderPassInputAttachmentAspectCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<RenderPassInputAttachmentAspectCreateInfo>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eRenderPassInputAttachmentAspectCreateInfo>
+ {
+ using Type = RenderPassInputAttachmentAspectCreateInfo;
+ };
+ using RenderPassInputAttachmentAspectCreateInfoKHR = RenderPassInputAttachmentAspectCreateInfo;
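
With enhanced mode in use (VULKAN_HPP_DISABLE_ENHANCED_MODE and VULKAN_HPP_NO_STRUCT_CONSTRUCTORS not defined), the ArrayProxyNoTemporaries constructor added above lets callers hand over a named container instead of a count/pointer pair. A small sketch with made-up values:

#include <vector>
#include <vulkan/vulkan.hpp>

void fillAspectInfo()
{
  // One hypothetical reference: subpass 0, input attachment 0, color aspect.
  std::vector<vk::InputAttachmentAspectReference> aspectRefs = {
    vk::InputAttachmentAspectReference( 0, 0, vk::ImageAspectFlagBits::eColor )
  };

  // aspectReferenceCount and pAspectReferences are derived from the container;
  // the proxy stores a pointer, so aspectRefs must outlive any use of aspectInfo.
  vk::RenderPassInputAttachmentAspectCreateInfo aspectInfo( aspectRefs );
  (void)aspectInfo;
}
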
+
struct RenderPassMultiviewCreateInfo
{
- VULKAN_HPP_CONSTEXPR RenderPassMultiviewCreateInfo( uint32_t subpassCount_ = {},
- const uint32_t* pViewMasks_ = {},
- uint32_t dependencyCount_ = {},
- const int32_t* pViewOffsets_ = {},
- uint32_t correlationMaskCount_ = {},
- const uint32_t* pCorrelationMasks_ = {} ) VULKAN_HPP_NOEXCEPT
- : subpassCount( subpassCount_ )
- , pViewMasks( pViewMasks_ )
- , dependencyCount( dependencyCount_ )
- , pViewOffsets( pViewOffsets_ )
- , correlationMaskCount( correlationMaskCount_ )
- , pCorrelationMasks( pCorrelationMasks_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassMultiviewCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR RenderPassMultiviewCreateInfo(uint32_t subpassCount_ = {}, const uint32_t* pViewMasks_ = {}, uint32_t dependencyCount_ = {}, const int32_t* pViewOffsets_ = {}, uint32_t correlationMaskCount_ = {}, const uint32_t* pCorrelationMasks_ = {}) VULKAN_HPP_NOEXCEPT
+ : subpassCount( subpassCount_ ), pViewMasks( pViewMasks_ ), dependencyCount( dependencyCount_ ), pViewOffsets( pViewOffsets_ ), correlationMaskCount( correlationMaskCount_ ), pCorrelationMasks( pCorrelationMasks_ )
{}
- VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo & operator=( VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo ) - offsetof( RenderPassMultiviewCreateInfo, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR RenderPassMultiviewCreateInfo( RenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RenderPassMultiviewCreateInfo( VkRenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- RenderPassMultiviewCreateInfo& operator=( VkRenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ RenderPassMultiviewCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & viewMasks_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const int32_t> const & viewOffsets_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & correlationMasks_ = {} )
+ : subpassCount( static_cast<uint32_t>( viewMasks_.size() ) ), pViewMasks( viewMasks_.data() ), dependencyCount( static_cast<uint32_t>( viewOffsets_.size() ) ), pViewOffsets( viewOffsets_.data() ), correlationMaskCount( static_cast<uint32_t>( correlationMasks_.size() ) ), pCorrelationMasks( correlationMasks_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ RenderPassMultiviewCreateInfo & operator=( VkRenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo const *>( &rhs );
+ return *this;
+ }
+
+ RenderPassMultiviewCreateInfo & operator=( RenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( RenderPassMultiviewCreateInfo ) );
return *this;
}
@@ -56680,6 +75748,15 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ RenderPassMultiviewCreateInfo & setViewMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & viewMasks_ ) VULKAN_HPP_NOEXCEPT
+ {
+ subpassCount = static_cast<uint32_t>( viewMasks_.size() );
+ pViewMasks = viewMasks_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
RenderPassMultiviewCreateInfo & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT
{
dependencyCount = dependencyCount_;
@@ -56692,6 +75769,15 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ RenderPassMultiviewCreateInfo & setViewOffsets( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const int32_t> const & viewOffsets_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dependencyCount = static_cast<uint32_t>( viewOffsets_.size() );
+ pViewOffsets = viewOffsets_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
RenderPassMultiviewCreateInfo & setCorrelationMaskCount( uint32_t correlationMaskCount_ ) VULKAN_HPP_NOEXCEPT
{
correlationMaskCount = correlationMaskCount_;
@@ -56704,6 +75790,16 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ RenderPassMultiviewCreateInfo & setCorrelationMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & correlationMasks_ ) VULKAN_HPP_NOEXCEPT
+ {
+ correlationMaskCount = static_cast<uint32_t>( correlationMasks_.size() );
+ pCorrelationMasks = correlationMasks_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
operator VkRenderPassMultiviewCreateInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRenderPassMultiviewCreateInfo*>( this );
@@ -56714,6 +75810,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkRenderPassMultiviewCreateInfo*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( RenderPassMultiviewCreateInfo const& ) const = default;
+#else
bool operator==( RenderPassMultiviewCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -56730,6 +75830,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassMultiviewCreateInfo;
@@ -56740,26 +75843,44 @@ namespace VULKAN_HPP_NAMESPACE
const int32_t* pViewOffsets = {};
uint32_t correlationMaskCount = {};
const uint32_t* pCorrelationMasks = {};
+
};
static_assert( sizeof( RenderPassMultiviewCreateInfo ) == sizeof( VkRenderPassMultiviewCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<RenderPassMultiviewCreateInfo>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eRenderPassMultiviewCreateInfo>
+ {
+ using Type = RenderPassMultiviewCreateInfo;
+ };
+ using RenderPassMultiviewCreateInfoKHR = RenderPassMultiviewCreateInfo;
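
RenderPassMultiviewCreateInfo gets the same treatment: the proxy-based constructor and the setViewMasks / setViewOffsets / setCorrelationMasks helpers fill in the count and pointer members together. A sketch under assumed values (two subpasses, each rendering views 0 and 1):

#include <array>
#include <vulkan/vulkan.hpp>

void fillMultiviewInfo()
{
  std::array<uint32_t, 2> viewMasks        = { 0x3u, 0x3u }; // views 0 and 1 per subpass
  std::array<uint32_t, 1> correlationMasks = { 0x3u };       // views 0 and 1 are correlated

  // viewOffsets is left empty; subpassCount, dependencyCount and correlationMaskCount
  // are taken from the container sizes.
  vk::RenderPassMultiviewCreateInfo multiviewInfo( viewMasks, {}, correlationMasks );
  (void)multiviewInfo;
}
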
+
struct SubpassSampleLocationsEXT
{
- VULKAN_HPP_CONSTEXPR SubpassSampleLocationsEXT( uint32_t subpassIndex_ = {},
- VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {} ) VULKAN_HPP_NOEXCEPT
- : subpassIndex( subpassIndex_ )
- , sampleLocationsInfo( sampleLocationsInfo_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SubpassSampleLocationsEXT(uint32_t subpassIndex_ = {}, VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {}) VULKAN_HPP_NOEXCEPT
+ : subpassIndex( subpassIndex_ ), sampleLocationsInfo( sampleLocationsInfo_ )
{}
+ VULKAN_HPP_CONSTEXPR SubpassSampleLocationsEXT( SubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
SubpassSampleLocationsEXT( VkSubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- SubpassSampleLocationsEXT& operator=( VkSubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ SubpassSampleLocationsEXT & operator=( VkSubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT const *>( &rhs );
+ return *this;
+ }
+
+ SubpassSampleLocationsEXT & operator=( SubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( SubpassSampleLocationsEXT ) );
return *this;
}
@@ -56769,12 +75890,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- SubpassSampleLocationsEXT & setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
+ SubpassSampleLocationsEXT & setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
{
sampleLocationsInfo = sampleLocationsInfo_;
return *this;
}
+
operator VkSubpassSampleLocationsEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSubpassSampleLocationsEXT*>( this );
@@ -56785,6 +75907,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkSubpassSampleLocationsEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SubpassSampleLocationsEXT const& ) const = default;
+#else
bool operator==( SubpassSampleLocationsEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( subpassIndex == rhs.subpassIndex )
@@ -56795,40 +75921,51 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
uint32_t subpassIndex = {};
VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {};
+
};
static_assert( sizeof( SubpassSampleLocationsEXT ) == sizeof( VkSubpassSampleLocationsEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SubpassSampleLocationsEXT>::value, "struct wrapper is not a standard layout!" );
struct RenderPassSampleLocationsBeginInfoEXT
{
- VULKAN_HPP_CONSTEXPR RenderPassSampleLocationsBeginInfoEXT( uint32_t attachmentInitialSampleLocationsCount_ = {},
- const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations_ = {},
- uint32_t postSubpassSampleLocationsCount_ = {},
- const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT* pPostSubpassSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT
- : attachmentInitialSampleLocationsCount( attachmentInitialSampleLocationsCount_ )
- , pAttachmentInitialSampleLocations( pAttachmentInitialSampleLocations_ )
- , postSubpassSampleLocationsCount( postSubpassSampleLocationsCount_ )
- , pPostSubpassSampleLocations( pPostSubpassSampleLocations_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassSampleLocationsBeginInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR RenderPassSampleLocationsBeginInfoEXT(uint32_t attachmentInitialSampleLocationsCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations_ = {}, uint32_t postSubpassSampleLocationsCount_ = {}, const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT* pPostSubpassSampleLocations_ = {}) VULKAN_HPP_NOEXCEPT
+ : attachmentInitialSampleLocationsCount( attachmentInitialSampleLocationsCount_ ), pAttachmentInitialSampleLocations( pAttachmentInitialSampleLocations_ ), postSubpassSampleLocationsCount( postSubpassSampleLocationsCount_ ), pPostSubpassSampleLocations( pPostSubpassSampleLocations_ )
{}
- VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT & operator=( VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT ) - offsetof( RenderPassSampleLocationsBeginInfoEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR RenderPassSampleLocationsBeginInfoEXT( RenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RenderPassSampleLocationsBeginInfoEXT( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- RenderPassSampleLocationsBeginInfoEXT& operator=( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ RenderPassSampleLocationsBeginInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT> const & attachmentInitialSampleLocations_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT> const & postSubpassSampleLocations_ = {} )
+ : attachmentInitialSampleLocationsCount( static_cast<uint32_t>( attachmentInitialSampleLocations_.size() ) ), pAttachmentInitialSampleLocations( attachmentInitialSampleLocations_.data() ), postSubpassSampleLocationsCount( static_cast<uint32_t>( postSubpassSampleLocations_.size() ) ), pPostSubpassSampleLocations( postSubpassSampleLocations_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ RenderPassSampleLocationsBeginInfoEXT & operator=( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+ RenderPassSampleLocationsBeginInfoEXT & operator=( RenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( RenderPassSampleLocationsBeginInfoEXT ) );
return *this;
}
@@ -56850,6 +75987,15 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ RenderPassSampleLocationsBeginInfoEXT & setAttachmentInitialSampleLocations( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT> const & attachmentInitialSampleLocations_ ) VULKAN_HPP_NOEXCEPT
+ {
+ attachmentInitialSampleLocationsCount = static_cast<uint32_t>( attachmentInitialSampleLocations_.size() );
+ pAttachmentInitialSampleLocations = attachmentInitialSampleLocations_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
RenderPassSampleLocationsBeginInfoEXT & setPostSubpassSampleLocationsCount( uint32_t postSubpassSampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT
{
postSubpassSampleLocationsCount = postSubpassSampleLocationsCount_;
@@ -56862,6 +76008,16 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ RenderPassSampleLocationsBeginInfoEXT & setPostSubpassSampleLocations( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT> const & postSubpassSampleLocations_ ) VULKAN_HPP_NOEXCEPT
+ {
+ postSubpassSampleLocationsCount = static_cast<uint32_t>( postSubpassSampleLocations_.size() );
+ pPostSubpassSampleLocations = postSubpassSampleLocations_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
operator VkRenderPassSampleLocationsBeginInfoEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRenderPassSampleLocationsBeginInfoEXT*>( this );
@@ -56872,6 +76028,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkRenderPassSampleLocationsBeginInfoEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( RenderPassSampleLocationsBeginInfoEXT const& ) const = default;
+#else
bool operator==( RenderPassSampleLocationsBeginInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -56886,6 +76046,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassSampleLocationsBeginInfoEXT;
@@ -56894,245 +76057,209 @@ namespace VULKAN_HPP_NAMESPACE
const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations = {};
uint32_t postSubpassSampleLocationsCount = {};
const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT* pPostSubpassSampleLocations = {};
+
};
static_assert( sizeof( RenderPassSampleLocationsBeginInfoEXT ) == sizeof( VkRenderPassSampleLocationsBeginInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<RenderPassSampleLocationsBeginInfoEXT>::value, "struct wrapper is not a standard layout!" );
- struct SamplerCreateInfo
+ template <>
+ struct CppType<StructureType, StructureType::eRenderPassSampleLocationsBeginInfoEXT>
{
- VULKAN_HPP_CONSTEXPR SamplerCreateInfo( VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags_ = {},
- VULKAN_HPP_NAMESPACE::Filter magFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest,
- VULKAN_HPP_NAMESPACE::Filter minFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest,
- VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode_ = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest,
- VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat,
- VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat,
- VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat,
- float mipLodBias_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable_ = {},
- float maxAnisotropy_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 compareEnable_ = {},
- VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever,
- float minLod_ = {},
- float maxLod_ = {},
- VULKAN_HPP_NAMESPACE::BorderColor borderColor_ = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack,
- VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , magFilter( magFilter_ )
- , minFilter( minFilter_ )
- , mipmapMode( mipmapMode_ )
- , addressModeU( addressModeU_ )
- , addressModeV( addressModeV_ )
- , addressModeW( addressModeW_ )
- , mipLodBias( mipLodBias_ )
- , anisotropyEnable( anisotropyEnable_ )
- , maxAnisotropy( maxAnisotropy_ )
- , compareEnable( compareEnable_ )
- , compareOp( compareOp_ )
- , minLod( minLod_ )
- , maxLod( maxLod_ )
- , borderColor( borderColor_ )
- , unnormalizedCoordinates( unnormalizedCoordinates_ )
- {}
-
- VULKAN_HPP_NAMESPACE::SamplerCreateInfo & operator=( VULKAN_HPP_NAMESPACE::SamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SamplerCreateInfo ) - offsetof( SamplerCreateInfo, pNext ) );
- return *this;
- }
+ using Type = RenderPassSampleLocationsBeginInfoEXT;
+ };
- SamplerCreateInfo( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ struct RenderPassTransformBeginInfoQCOM
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassTransformBeginInfoQCOM;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR RenderPassTransformBeginInfoQCOM(VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity) VULKAN_HPP_NOEXCEPT
+ : transform( transform_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR RenderPassTransformBeginInfoQCOM( RenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ RenderPassTransformBeginInfoQCOM( VkRenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- SamplerCreateInfo& operator=( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ RenderPassTransformBeginInfoQCOM & operator=( VkRenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerCreateInfo const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassTransformBeginInfoQCOM const *>( &rhs );
return *this;
}
- SamplerCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ RenderPassTransformBeginInfoQCOM & operator=( RenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( RenderPassTransformBeginInfoQCOM ) );
return *this;
}
- SamplerCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ RenderPassTransformBeginInfoQCOM & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- flags = flags_;
+ pNext = pNext_;
return *this;
}
- SamplerCreateInfo & setMagFilter( VULKAN_HPP_NAMESPACE::Filter magFilter_ ) VULKAN_HPP_NOEXCEPT
+ RenderPassTransformBeginInfoQCOM & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT
{
- magFilter = magFilter_;
+ transform = transform_;
return *this;
}
- SamplerCreateInfo & setMinFilter( VULKAN_HPP_NAMESPACE::Filter minFilter_ ) VULKAN_HPP_NOEXCEPT
- {
- minFilter = minFilter_;
- return *this;
- }
- SamplerCreateInfo & setMipmapMode( VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode_ ) VULKAN_HPP_NOEXCEPT
+ operator VkRenderPassTransformBeginInfoQCOM const&() const VULKAN_HPP_NOEXCEPT
{
- mipmapMode = mipmapMode_;
- return *this;
+ return *reinterpret_cast<const VkRenderPassTransformBeginInfoQCOM*>( this );
}
- SamplerCreateInfo & setAddressModeU( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU_ ) VULKAN_HPP_NOEXCEPT
+ operator VkRenderPassTransformBeginInfoQCOM &() VULKAN_HPP_NOEXCEPT
{
- addressModeU = addressModeU_;
- return *this;
+ return *reinterpret_cast<VkRenderPassTransformBeginInfoQCOM*>( this );
}
- SamplerCreateInfo & setAddressModeV( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV_ ) VULKAN_HPP_NOEXCEPT
- {
- addressModeV = addressModeV_;
- return *this;
- }
- SamplerCreateInfo & setAddressModeW( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW_ ) VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( RenderPassTransformBeginInfoQCOM const& ) const = default;
+#else
+ bool operator==( RenderPassTransformBeginInfoQCOM const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- addressModeW = addressModeW_;
- return *this;
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( transform == rhs.transform );
}
- SamplerCreateInfo & setMipLodBias( float mipLodBias_ ) VULKAN_HPP_NOEXCEPT
+ bool operator!=( RenderPassTransformBeginInfoQCOM const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- mipLodBias = mipLodBias_;
- return *this;
+ return !operator==( rhs );
}
+#endif
- SamplerCreateInfo & setAnisotropyEnable( VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable_ ) VULKAN_HPP_NOEXCEPT
- {
- anisotropyEnable = anisotropyEnable_;
- return *this;
- }
- SamplerCreateInfo & setMaxAnisotropy( float maxAnisotropy_ ) VULKAN_HPP_NOEXCEPT
- {
- maxAnisotropy = maxAnisotropy_;
- return *this;
- }
- SamplerCreateInfo & setCompareEnable( VULKAN_HPP_NAMESPACE::Bool32 compareEnable_ ) VULKAN_HPP_NOEXCEPT
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassTransformBeginInfoQCOM;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
+
+ };
+ static_assert( sizeof( RenderPassTransformBeginInfoQCOM ) == sizeof( VkRenderPassTransformBeginInfoQCOM ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<RenderPassTransformBeginInfoQCOM>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eRenderPassTransformBeginInfoQCOM>
+ {
+ using Type = RenderPassTransformBeginInfoQCOM;
+ };
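
Note that RenderPassTransformBeginInfoQCOM carries a non-const void* pNext, unlike most create-info wrappers here. A trivial usage sketch (values assumed; the struct comes from VK_QCOM_render_pass_transform):

#include <vulkan/vulkan.hpp>

void fillTransformInfo()
{
  // Select a 90-degree surface transform for this render pass.
  vk::RenderPassTransformBeginInfoQCOM transformInfo;
  transformInfo.setTransform( vk::SurfaceTransformFlagBitsKHR::eRotate90 );
  (void)transformInfo;
}
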
+
+ struct SamplerCustomBorderColorCreateInfoEXT
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerCustomBorderColorCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ SamplerCustomBorderColorCreateInfoEXT(VULKAN_HPP_NAMESPACE::ClearColorValue customBorderColor_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined) VULKAN_HPP_NOEXCEPT
+ : customBorderColor( customBorderColor_ ), format( format_ )
+ {}
+
+ SamplerCustomBorderColorCreateInfoEXT( SamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ SamplerCustomBorderColorCreateInfoEXT( VkSamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- compareEnable = compareEnable_;
- return *this;
+ *this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- SamplerCreateInfo & setCompareOp( VULKAN_HPP_NAMESPACE::CompareOp compareOp_ ) VULKAN_HPP_NOEXCEPT
+ SamplerCustomBorderColorCreateInfoEXT & operator=( VkSamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- compareOp = compareOp_;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerCustomBorderColorCreateInfoEXT const *>( &rhs );
return *this;
}
- SamplerCreateInfo & setMinLod( float minLod_ ) VULKAN_HPP_NOEXCEPT
+ SamplerCustomBorderColorCreateInfoEXT & operator=( SamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- minLod = minLod_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( SamplerCustomBorderColorCreateInfoEXT ) );
return *this;
}
- SamplerCreateInfo & setMaxLod( float maxLod_ ) VULKAN_HPP_NOEXCEPT
+ SamplerCustomBorderColorCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- maxLod = maxLod_;
+ pNext = pNext_;
return *this;
}
- SamplerCreateInfo & setBorderColor( VULKAN_HPP_NAMESPACE::BorderColor borderColor_ ) VULKAN_HPP_NOEXCEPT
+ SamplerCustomBorderColorCreateInfoEXT & setCustomBorderColor( VULKAN_HPP_NAMESPACE::ClearColorValue const & customBorderColor_ ) VULKAN_HPP_NOEXCEPT
{
- borderColor = borderColor_;
+ customBorderColor = customBorderColor_;
return *this;
}
- SamplerCreateInfo & setUnnormalizedCoordinates( VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ ) VULKAN_HPP_NOEXCEPT
+ SamplerCustomBorderColorCreateInfoEXT & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
{
- unnormalizedCoordinates = unnormalizedCoordinates_;
+ format = format_;
return *this;
}
- operator VkSamplerCreateInfo const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkSamplerCreateInfo*>( this );
- }
- operator VkSamplerCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkSamplerCustomBorderColorCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkSamplerCreateInfo*>( this );
+ return *reinterpret_cast<const VkSamplerCustomBorderColorCreateInfoEXT*>( this );
}
- bool operator==( SamplerCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ operator VkSamplerCustomBorderColorCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( flags == rhs.flags )
- && ( magFilter == rhs.magFilter )
- && ( minFilter == rhs.minFilter )
- && ( mipmapMode == rhs.mipmapMode )
- && ( addressModeU == rhs.addressModeU )
- && ( addressModeV == rhs.addressModeV )
- && ( addressModeW == rhs.addressModeW )
- && ( mipLodBias == rhs.mipLodBias )
- && ( anisotropyEnable == rhs.anisotropyEnable )
- && ( maxAnisotropy == rhs.maxAnisotropy )
- && ( compareEnable == rhs.compareEnable )
- && ( compareOp == rhs.compareOp )
- && ( minLod == rhs.minLod )
- && ( maxLod == rhs.maxLod )
- && ( borderColor == rhs.borderColor )
- && ( unnormalizedCoordinates == rhs.unnormalizedCoordinates );
+ return *reinterpret_cast<VkSamplerCustomBorderColorCreateInfoEXT*>( this );
}
- bool operator!=( SamplerCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCreateInfo;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCustomBorderColorCreateInfoEXT;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags = {};
- VULKAN_HPP_NAMESPACE::Filter magFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest;
- VULKAN_HPP_NAMESPACE::Filter minFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest;
- VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest;
- VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat;
- VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat;
- VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat;
- float mipLodBias = {};
- VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable = {};
- float maxAnisotropy = {};
- VULKAN_HPP_NAMESPACE::Bool32 compareEnable = {};
- VULKAN_HPP_NAMESPACE::CompareOp compareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever;
- float minLod = {};
- float maxLod = {};
- VULKAN_HPP_NAMESPACE::BorderColor borderColor = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack;
- VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates = {};
+ VULKAN_HPP_NAMESPACE::ClearColorValue customBorderColor = {};
+ VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+
+ };
+ static_assert( sizeof( SamplerCustomBorderColorCreateInfoEXT ) == sizeof( VkSamplerCustomBorderColorCreateInfoEXT ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<SamplerCustomBorderColorCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eSamplerCustomBorderColorCreateInfoEXT>
+ {
+ using Type = SamplerCustomBorderColorCreateInfoEXT;
};
- static_assert( sizeof( SamplerCreateInfo ) == sizeof( VkSamplerCreateInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<SamplerCreateInfo>::value, "struct wrapper is not a standard layout!" );
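
A hedged usage sketch for the SamplerCustomBorderColorCreateInfoEXT struct added above, chaining a custom border color into a SamplerCreateInfo (assumes VK_EXT_custom_border_color is enabled and that BorderColor::eFloatCustomEXT from the same extension is the border color in use; values are illustrative):

#include <array>
#include <vulkan/vulkan.hpp>

vk::SamplerCreateInfo makeCustomBorderSamplerInfo()
{
  // Opaque red border color; the format should match the sampled image view.
  // static so the pNext chain outlives this function (sketch only).
  static vk::SamplerCustomBorderColorCreateInfoEXT customBorder(
      vk::ClearColorValue( std::array<float, 4>{ { 1.0f, 0.0f, 0.0f, 1.0f } } ),
      vk::Format::eR8G8B8A8Unorm );

  vk::SamplerCreateInfo samplerInfo;
  samplerInfo.setPNext( &customBorder )
             .setBorderColor( vk::BorderColor::eFloatCustomEXT );
  return samplerInfo;
}
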
struct SamplerReductionModeCreateInfo
{
- VULKAN_HPP_CONSTEXPR SamplerReductionModeCreateInfo( VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode_ = VULKAN_HPP_NAMESPACE::SamplerReductionMode::eWeightedAverage ) VULKAN_HPP_NOEXCEPT
- : reductionMode( reductionMode_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerReductionModeCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SamplerReductionModeCreateInfo(VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode_ = VULKAN_HPP_NAMESPACE::SamplerReductionMode::eWeightedAverage) VULKAN_HPP_NOEXCEPT
+ : reductionMode( reductionMode_ )
{}
- VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo & operator=( VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo ) - offsetof( SamplerReductionModeCreateInfo, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR SamplerReductionModeCreateInfo( SamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SamplerReductionModeCreateInfo( VkSamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ SamplerReductionModeCreateInfo & operator=( VkSamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo const *>( &rhs );
+ return *this;
+ }
- SamplerReductionModeCreateInfo& operator=( VkSamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ SamplerReductionModeCreateInfo & operator=( SamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( SamplerReductionModeCreateInfo ) );
return *this;
}
@@ -57148,6 +76275,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkSamplerReductionModeCreateInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSamplerReductionModeCreateInfo*>( this );
@@ -57158,6 +76286,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkSamplerReductionModeCreateInfo*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SamplerReductionModeCreateInfo const& ) const = default;
+#else
bool operator==( SamplerReductionModeCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -57169,173 +76301,57 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerReductionModeCreateInfo;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode = VULKAN_HPP_NAMESPACE::SamplerReductionMode::eWeightedAverage;
+
};
static_assert( sizeof( SamplerReductionModeCreateInfo ) == sizeof( VkSamplerReductionModeCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SamplerReductionModeCreateInfo>::value, "struct wrapper is not a standard layout!" );
- struct SamplerYcbcrConversionCreateInfo
+ template <>
+ struct CppType<StructureType, StructureType::eSamplerReductionModeCreateInfo>
{
- VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionCreateInfo( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
- VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity,
- VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull,
- VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {},
- VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven,
- VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven,
- VULKAN_HPP_NAMESPACE::Filter chromaFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest,
- VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ = {} ) VULKAN_HPP_NOEXCEPT
- : format( format_ )
- , ycbcrModel( ycbcrModel_ )
- , ycbcrRange( ycbcrRange_ )
- , components( components_ )
- , xChromaOffset( xChromaOffset_ )
- , yChromaOffset( yChromaOffset_ )
- , chromaFilter( chromaFilter_ )
- , forceExplicitReconstruction( forceExplicitReconstruction_ )
- {}
-
- VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & operator=( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo ) - offsetof( SamplerYcbcrConversionCreateInfo, pNext ) );
- return *this;
- }
-
- SamplerYcbcrConversionCreateInfo( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
-
- SamplerYcbcrConversionCreateInfo& operator=( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const *>(&rhs);
- return *this;
- }
-
- SamplerYcbcrConversionCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
- SamplerYcbcrConversionCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
- {
- format = format_;
- return *this;
- }
-
- SamplerYcbcrConversionCreateInfo & setYcbcrModel( VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ ) VULKAN_HPP_NOEXCEPT
- {
- ycbcrModel = ycbcrModel_;
- return *this;
- }
-
- SamplerYcbcrConversionCreateInfo & setYcbcrRange( VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_ ) VULKAN_HPP_NOEXCEPT
- {
- ycbcrRange = ycbcrRange_;
- return *this;
- }
-
- SamplerYcbcrConversionCreateInfo & setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping components_ ) VULKAN_HPP_NOEXCEPT
- {
- components = components_;
- return *this;
- }
-
- SamplerYcbcrConversionCreateInfo & setXChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ ) VULKAN_HPP_NOEXCEPT
- {
- xChromaOffset = xChromaOffset_;
- return *this;
- }
-
- SamplerYcbcrConversionCreateInfo & setYChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ ) VULKAN_HPP_NOEXCEPT
- {
- yChromaOffset = yChromaOffset_;
- return *this;
- }
-
- SamplerYcbcrConversionCreateInfo & setChromaFilter( VULKAN_HPP_NAMESPACE::Filter chromaFilter_ ) VULKAN_HPP_NOEXCEPT
- {
- chromaFilter = chromaFilter_;
- return *this;
- }
-
- SamplerYcbcrConversionCreateInfo & setForceExplicitReconstruction( VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ ) VULKAN_HPP_NOEXCEPT
- {
- forceExplicitReconstruction = forceExplicitReconstruction_;
- return *this;
- }
-
- operator VkSamplerYcbcrConversionCreateInfo const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo*>( this );
- }
-
- operator VkSamplerYcbcrConversionCreateInfo &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkSamplerYcbcrConversionCreateInfo*>( this );
- }
-
- bool operator==( SamplerYcbcrConversionCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( format == rhs.format )
- && ( ycbcrModel == rhs.ycbcrModel )
- && ( ycbcrRange == rhs.ycbcrRange )
- && ( components == rhs.components )
- && ( xChromaOffset == rhs.xChromaOffset )
- && ( yChromaOffset == rhs.yChromaOffset )
- && ( chromaFilter == rhs.chromaFilter )
- && ( forceExplicitReconstruction == rhs.forceExplicitReconstruction );
- }
-
- bool operator!=( SamplerYcbcrConversionCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-
- public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionCreateInfo;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
- VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity;
- VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull;
- VULKAN_HPP_NAMESPACE::ComponentMapping components = {};
- VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
- VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
- VULKAN_HPP_NAMESPACE::Filter chromaFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest;
- VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction = {};
+ using Type = SamplerReductionModeCreateInfo;
};
- static_assert( sizeof( SamplerYcbcrConversionCreateInfo ) == sizeof( VkSamplerYcbcrConversionCreateInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<SamplerYcbcrConversionCreateInfo>::value, "struct wrapper is not a standard layout!" );
+ using SamplerReductionModeCreateInfoEXT = SamplerReductionModeCreateInfo;
struct SamplerYcbcrConversionImageFormatProperties
{
- SamplerYcbcrConversionImageFormatProperties( uint32_t combinedImageSamplerDescriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT
- : combinedImageSamplerDescriptorCount( combinedImageSamplerDescriptorCount_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionImageFormatProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionImageFormatProperties(uint32_t combinedImageSamplerDescriptorCount_ = {}) VULKAN_HPP_NOEXCEPT
+ : combinedImageSamplerDescriptorCount( combinedImageSamplerDescriptorCount_ )
{}
- VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties & operator=( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties ) - offsetof( SamplerYcbcrConversionImageFormatProperties, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionImageFormatProperties( SamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SamplerYcbcrConversionImageFormatProperties( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- SamplerYcbcrConversionImageFormatProperties& operator=( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ SamplerYcbcrConversionImageFormatProperties & operator=( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties const *>( &rhs );
return *this;
}
+ SamplerYcbcrConversionImageFormatProperties & operator=( SamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( SamplerYcbcrConversionImageFormatProperties ) );
+ return *this;
+ }
+
+
operator VkSamplerYcbcrConversionImageFormatProperties const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSamplerYcbcrConversionImageFormatProperties*>( this );
@@ -57346,6 +76362,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkSamplerYcbcrConversionImageFormatProperties*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SamplerYcbcrConversionImageFormatProperties const& ) const = default;
+#else
bool operator==( SamplerYcbcrConversionImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -57357,35 +76377,53 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionImageFormatProperties;
void* pNext = {};
uint32_t combinedImageSamplerDescriptorCount = {};
+
};
static_assert( sizeof( SamplerYcbcrConversionImageFormatProperties ) == sizeof( VkSamplerYcbcrConversionImageFormatProperties ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SamplerYcbcrConversionImageFormatProperties>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eSamplerYcbcrConversionImageFormatProperties>
+ {
+ using Type = SamplerYcbcrConversionImageFormatProperties;
+ };
+ using SamplerYcbcrConversionImageFormatPropertiesKHR = SamplerYcbcrConversionImageFormatProperties;
+
struct SamplerYcbcrConversionInfo
{
- VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionInfo( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ = {} ) VULKAN_HPP_NOEXCEPT
- : conversion( conversion_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionInfo(VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ = {}) VULKAN_HPP_NOEXCEPT
+ : conversion( conversion_ )
{}
- VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo & operator=( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo ) - offsetof( SamplerYcbcrConversionInfo, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionInfo( SamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SamplerYcbcrConversionInfo( VkSamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ SamplerYcbcrConversionInfo & operator=( VkSamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo const *>( &rhs );
+ return *this;
+ }
- SamplerYcbcrConversionInfo& operator=( VkSamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ SamplerYcbcrConversionInfo & operator=( SamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( SamplerYcbcrConversionInfo ) );
return *this;
}
@@ -57401,6 +76439,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkSamplerYcbcrConversionInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSamplerYcbcrConversionInfo*>( this );
@@ -57411,6 +76450,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkSamplerYcbcrConversionInfo*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SamplerYcbcrConversionInfo const& ) const = default;
+#else
bool operator==( SamplerYcbcrConversionInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -57422,330 +76465,53 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionInfo;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion = {};
+
};
static_assert( sizeof( SamplerYcbcrConversionInfo ) == sizeof( VkSamplerYcbcrConversionInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SamplerYcbcrConversionInfo>::value, "struct wrapper is not a standard layout!" );
- struct SemaphoreCreateInfo
+ template <>
+ struct CppType<StructureType, StructureType::eSamplerYcbcrConversionInfo>
{
- VULKAN_HPP_CONSTEXPR SemaphoreCreateInfo( VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- {}
-
- VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & operator=( VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo ) - offsetof( SemaphoreCreateInfo, pNext ) );
- return *this;
- }
-
- SemaphoreCreateInfo( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
-
- SemaphoreCreateInfo& operator=( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const *>(&rhs);
- return *this;
- }
-
- SemaphoreCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
- SemaphoreCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
- {
- flags = flags_;
- return *this;
- }
-
- operator VkSemaphoreCreateInfo const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkSemaphoreCreateInfo*>( this );
- }
-
- operator VkSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkSemaphoreCreateInfo*>( this );
- }
-
- bool operator==( SemaphoreCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( flags == rhs.flags );
- }
-
- bool operator!=( SemaphoreCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-
- public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreCreateInfo;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags = {};
+ using Type = SamplerYcbcrConversionInfo;
};
- static_assert( sizeof( SemaphoreCreateInfo ) == sizeof( VkSemaphoreCreateInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<SemaphoreCreateInfo>::value, "struct wrapper is not a standard layout!" );
-
- struct SemaphoreGetFdInfoKHR
- {
- VULKAN_HPP_CONSTEXPR SemaphoreGetFdInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
- : semaphore( semaphore_ )
- , handleType( handleType_ )
- {}
-
- VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR & operator=( VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR ) - offsetof( SemaphoreGetFdInfoKHR, pNext ) );
- return *this;
- }
-
- SemaphoreGetFdInfoKHR( VkSemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
-
- SemaphoreGetFdInfoKHR& operator=( VkSemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR const *>(&rhs);
- return *this;
- }
-
- SemaphoreGetFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
- SemaphoreGetFdInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
- {
- semaphore = semaphore_;
- return *this;
- }
-
- SemaphoreGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
- {
- handleType = handleType_;
- return *this;
- }
-
- operator VkSemaphoreGetFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkSemaphoreGetFdInfoKHR*>( this );
- }
-
- operator VkSemaphoreGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkSemaphoreGetFdInfoKHR*>( this );
- }
-
- bool operator==( SemaphoreGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( semaphore == rhs.semaphore )
- && ( handleType == rhs.handleType );
- }
-
- bool operator!=( SemaphoreGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-
- public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetFdInfoKHR;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
- };
- static_assert( sizeof( SemaphoreGetFdInfoKHR ) == sizeof( VkSemaphoreGetFdInfoKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<SemaphoreGetFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
+ using SamplerYcbcrConversionInfoKHR = SamplerYcbcrConversionInfo;
- struct SemaphoreGetWin32HandleInfoKHR
+ struct SemaphoreTypeCreateInfo
{
- VULKAN_HPP_CONSTEXPR SemaphoreGetWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
- : semaphore( semaphore_ )
- , handleType( handleType_ )
- {}
-
- VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR & operator=( VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR ) - offsetof( SemaphoreGetWin32HandleInfoKHR, pNext ) );
- return *this;
- }
-
- SemaphoreGetWin32HandleInfoKHR( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
-
- SemaphoreGetWin32HandleInfoKHR& operator=( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR const *>(&rhs);
- return *this;
- }
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreTypeCreateInfo;
- SemaphoreGetWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
- SemaphoreGetWin32HandleInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
- {
- semaphore = semaphore_;
- return *this;
- }
-
- SemaphoreGetWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
- {
- handleType = handleType_;
- return *this;
- }
-
- operator VkSemaphoreGetWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR*>( this );
- }
-
- operator VkSemaphoreGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkSemaphoreGetWin32HandleInfoKHR*>( this );
- }
-
- bool operator==( SemaphoreGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( semaphore == rhs.semaphore )
- && ( handleType == rhs.handleType );
- }
-
- bool operator!=( SemaphoreGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-
- public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetWin32HandleInfoKHR;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
- };
- static_assert( sizeof( SemaphoreGetWin32HandleInfoKHR ) == sizeof( VkSemaphoreGetWin32HandleInfoKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<SemaphoreGetWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
- struct SemaphoreSignalInfo
- {
- VULKAN_HPP_CONSTEXPR SemaphoreSignalInfo( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
- uint64_t value_ = {} ) VULKAN_HPP_NOEXCEPT
- : semaphore( semaphore_ )
- , value( value_ )
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SemaphoreTypeCreateInfo(VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType_ = VULKAN_HPP_NAMESPACE::SemaphoreType::eBinary, uint64_t initialValue_ = {}) VULKAN_HPP_NOEXCEPT
+ : semaphoreType( semaphoreType_ ), initialValue( initialValue_ )
{}
- VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & operator=( VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo ) - offsetof( SemaphoreSignalInfo, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR SemaphoreTypeCreateInfo( SemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- SemaphoreSignalInfo( VkSemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ SemaphoreTypeCreateInfo( VkSemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- SemaphoreSignalInfo& operator=( VkSemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo const *>(&rhs);
- return *this;
- }
-
- SemaphoreSignalInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
- SemaphoreSignalInfo & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
+ SemaphoreTypeCreateInfo & operator=( VkSemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- semaphore = semaphore_;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo const *>( &rhs );
return *this;
}
- SemaphoreSignalInfo & setValue( uint64_t value_ ) VULKAN_HPP_NOEXCEPT
+ SemaphoreTypeCreateInfo & operator=( SemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- value = value_;
- return *this;
- }
-
- operator VkSemaphoreSignalInfo const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkSemaphoreSignalInfo*>( this );
- }
-
- operator VkSemaphoreSignalInfo &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkSemaphoreSignalInfo*>( this );
- }
-
- bool operator==( SemaphoreSignalInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( semaphore == rhs.semaphore )
- && ( value == rhs.value );
- }
-
- bool operator!=( SemaphoreSignalInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-
- public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreSignalInfo;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
- uint64_t value = {};
- };
- static_assert( sizeof( SemaphoreSignalInfo ) == sizeof( VkSemaphoreSignalInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<SemaphoreSignalInfo>::value, "struct wrapper is not a standard layout!" );
-
- struct SemaphoreTypeCreateInfo
- {
- VULKAN_HPP_CONSTEXPR SemaphoreTypeCreateInfo( VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType_ = VULKAN_HPP_NAMESPACE::SemaphoreType::eBinary,
- uint64_t initialValue_ = {} ) VULKAN_HPP_NOEXCEPT
- : semaphoreType( semaphoreType_ )
- , initialValue( initialValue_ )
- {}
-
- VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo & operator=( VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo ) - offsetof( SemaphoreTypeCreateInfo, pNext ) );
- return *this;
- }
-
- SemaphoreTypeCreateInfo( VkSemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
-
- SemaphoreTypeCreateInfo& operator=( VkSemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( SemaphoreTypeCreateInfo ) );
return *this;
}
@@ -57767,6 +76533,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkSemaphoreTypeCreateInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSemaphoreTypeCreateInfo*>( this );
@@ -57777,6 +76544,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkSemaphoreTypeCreateInfo*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SemaphoreTypeCreateInfo const& ) const = default;
+#else
bool operator==( SemaphoreTypeCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -57789,216 +76560,124 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreTypeCreateInfo;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType = VULKAN_HPP_NAMESPACE::SemaphoreType::eBinary;
uint64_t initialValue = {};
+
};
static_assert( sizeof( SemaphoreTypeCreateInfo ) == sizeof( VkSemaphoreTypeCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SemaphoreTypeCreateInfo>::value, "struct wrapper is not a standard layout!" );
- struct SemaphoreWaitInfo
+ template <>
+ struct CppType<StructureType, StructureType::eSemaphoreTypeCreateInfo>
{
- VULKAN_HPP_CONSTEXPR SemaphoreWaitInfo( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_ = {},
- uint32_t semaphoreCount_ = {},
- const VULKAN_HPP_NAMESPACE::Semaphore* pSemaphores_ = {},
- const uint64_t* pValues_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , semaphoreCount( semaphoreCount_ )
- , pSemaphores( pSemaphores_ )
- , pValues( pValues_ )
- {}
-
- VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & operator=( VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo ) - offsetof( SemaphoreWaitInfo, pNext ) );
- return *this;
- }
-
- SemaphoreWaitInfo( VkSemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
-
- SemaphoreWaitInfo& operator=( VkSemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo const *>(&rhs);
- return *this;
- }
-
- SemaphoreWaitInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
- SemaphoreWaitInfo & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_ ) VULKAN_HPP_NOEXCEPT
- {
- flags = flags_;
- return *this;
- }
-
- SemaphoreWaitInfo & setSemaphoreCount( uint32_t semaphoreCount_ ) VULKAN_HPP_NOEXCEPT
- {
- semaphoreCount = semaphoreCount_;
- return *this;
- }
-
- SemaphoreWaitInfo & setPSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pSemaphores_ ) VULKAN_HPP_NOEXCEPT
- {
- pSemaphores = pSemaphores_;
- return *this;
- }
-
- SemaphoreWaitInfo & setPValues( const uint64_t* pValues_ ) VULKAN_HPP_NOEXCEPT
- {
- pValues = pValues_;
- return *this;
- }
-
- operator VkSemaphoreWaitInfo const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkSemaphoreWaitInfo*>( this );
- }
-
- operator VkSemaphoreWaitInfo &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkSemaphoreWaitInfo*>( this );
- }
-
- bool operator==( SemaphoreWaitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( flags == rhs.flags )
- && ( semaphoreCount == rhs.semaphoreCount )
- && ( pSemaphores == rhs.pSemaphores )
- && ( pValues == rhs.pValues );
- }
-
- bool operator!=( SemaphoreWaitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-
- public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreWaitInfo;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags = {};
- uint32_t semaphoreCount = {};
- const VULKAN_HPP_NAMESPACE::Semaphore* pSemaphores = {};
- const uint64_t* pValues = {};
+ using Type = SemaphoreTypeCreateInfo;
};
- static_assert( sizeof( SemaphoreWaitInfo ) == sizeof( VkSemaphoreWaitInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<SemaphoreWaitInfo>::value, "struct wrapper is not a standard layout!" );
+ using SemaphoreTypeCreateInfoKHR = SemaphoreTypeCreateInfo;
- struct ShaderModuleCreateInfo
+ struct SetStateFlagsIndirectCommandNV
{
- VULKAN_HPP_CONSTEXPR ShaderModuleCreateInfo( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_ = {},
- size_t codeSize_ = {},
- const uint32_t* pCode_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , codeSize( codeSize_ )
- , pCode( pCode_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SetStateFlagsIndirectCommandNV(uint32_t data_ = {}) VULKAN_HPP_NOEXCEPT
+ : data( data_ )
{}
- VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & operator=( VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo ) - offsetof( ShaderModuleCreateInfo, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR SetStateFlagsIndirectCommandNV( SetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- ShaderModuleCreateInfo( VkShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ SetStateFlagsIndirectCommandNV( VkSetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ShaderModuleCreateInfo& operator=( VkShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ SetStateFlagsIndirectCommandNV & operator=( VkSetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SetStateFlagsIndirectCommandNV const *>( &rhs );
return *this;
}
- ShaderModuleCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ SetStateFlagsIndirectCommandNV & operator=( SetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( SetStateFlagsIndirectCommandNV ) );
return *this;
}
- ShaderModuleCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ SetStateFlagsIndirectCommandNV & setData( uint32_t data_ ) VULKAN_HPP_NOEXCEPT
{
- flags = flags_;
+ data = data_;
return *this;
}
- ShaderModuleCreateInfo & setCodeSize( size_t codeSize_ ) VULKAN_HPP_NOEXCEPT
- {
- codeSize = codeSize_;
- return *this;
- }
- ShaderModuleCreateInfo & setPCode( const uint32_t* pCode_ ) VULKAN_HPP_NOEXCEPT
+ operator VkSetStateFlagsIndirectCommandNV const&() const VULKAN_HPP_NOEXCEPT
{
- pCode = pCode_;
- return *this;
+ return *reinterpret_cast<const VkSetStateFlagsIndirectCommandNV*>( this );
}
- operator VkShaderModuleCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+ operator VkSetStateFlagsIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkShaderModuleCreateInfo*>( this );
+ return *reinterpret_cast<VkSetStateFlagsIndirectCommandNV*>( this );
}
- operator VkShaderModuleCreateInfo &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkShaderModuleCreateInfo*>( this );
- }
- bool operator==( ShaderModuleCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SetStateFlagsIndirectCommandNV const& ) const = default;
+#else
+ bool operator==( SetStateFlagsIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( flags == rhs.flags )
- && ( codeSize == rhs.codeSize )
- && ( pCode == rhs.pCode );
+ return ( data == rhs.data );
}
- bool operator!=( ShaderModuleCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( SetStateFlagsIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleCreateInfo;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags = {};
- size_t codeSize = {};
- const uint32_t* pCode = {};
+ uint32_t data = {};
+
};
- static_assert( sizeof( ShaderModuleCreateInfo ) == sizeof( VkShaderModuleCreateInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ShaderModuleCreateInfo>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( SetStateFlagsIndirectCommandNV ) == sizeof( VkSetStateFlagsIndirectCommandNV ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<SetStateFlagsIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
struct ShaderModuleValidationCacheCreateInfoEXT
{
- VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache_ = {} ) VULKAN_HPP_NOEXCEPT
- : validationCache( validationCache_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eShaderModuleValidationCacheCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT(VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache_ = {}) VULKAN_HPP_NOEXCEPT
+ : validationCache( validationCache_ )
{}
- VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT ) - offsetof( ShaderModuleValidationCacheCreateInfoEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ShaderModuleValidationCacheCreateInfoEXT( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ShaderModuleValidationCacheCreateInfoEXT& operator=( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ ShaderModuleValidationCacheCreateInfoEXT & operator=( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+ ShaderModuleValidationCacheCreateInfoEXT & operator=( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ShaderModuleValidationCacheCreateInfoEXT ) );
return *this;
}
@@ -58014,6 +76693,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkShaderModuleValidationCacheCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkShaderModuleValidationCacheCreateInfoEXT*>( this );
@@ -58024,6 +76704,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkShaderModuleValidationCacheCreateInfoEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ShaderModuleValidationCacheCreateInfoEXT const& ) const = default;
+#else
bool operator==( ShaderModuleValidationCacheCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -58035,40 +76719,55 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleValidationCacheCreateInfoEXT;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache = {};
+
};
static_assert( sizeof( ShaderModuleValidationCacheCreateInfoEXT ) == sizeof( VkShaderModuleValidationCacheCreateInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ShaderModuleValidationCacheCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eShaderModuleValidationCacheCreateInfoEXT>
+ {
+ using Type = ShaderModuleValidationCacheCreateInfoEXT;
+ };
+
struct ShaderResourceUsageAMD
{
- ShaderResourceUsageAMD( uint32_t numUsedVgprs_ = {},
- uint32_t numUsedSgprs_ = {},
- uint32_t ldsSizePerLocalWorkGroup_ = {},
- size_t ldsUsageSizeInBytes_ = {},
- size_t scratchMemUsageInBytes_ = {} ) VULKAN_HPP_NOEXCEPT
- : numUsedVgprs( numUsedVgprs_ )
- , numUsedSgprs( numUsedSgprs_ )
- , ldsSizePerLocalWorkGroup( ldsSizePerLocalWorkGroup_ )
- , ldsUsageSizeInBytes( ldsUsageSizeInBytes_ )
- , scratchMemUsageInBytes( scratchMemUsageInBytes_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ShaderResourceUsageAMD(uint32_t numUsedVgprs_ = {}, uint32_t numUsedSgprs_ = {}, uint32_t ldsSizePerLocalWorkGroup_ = {}, size_t ldsUsageSizeInBytes_ = {}, size_t scratchMemUsageInBytes_ = {}) VULKAN_HPP_NOEXCEPT
+ : numUsedVgprs( numUsedVgprs_ ), numUsedSgprs( numUsedSgprs_ ), ldsSizePerLocalWorkGroup( ldsSizePerLocalWorkGroup_ ), ldsUsageSizeInBytes( ldsUsageSizeInBytes_ ), scratchMemUsageInBytes( scratchMemUsageInBytes_ )
{}
+ VULKAN_HPP_CONSTEXPR ShaderResourceUsageAMD( ShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
ShaderResourceUsageAMD( VkShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ ShaderResourceUsageAMD & operator=( VkShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD const *>( &rhs );
+ return *this;
+ }
- ShaderResourceUsageAMD& operator=( VkShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+ ShaderResourceUsageAMD & operator=( ShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ShaderResourceUsageAMD ) );
return *this;
}
+
operator VkShaderResourceUsageAMD const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkShaderResourceUsageAMD*>( this );
@@ -58079,6 +76778,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkShaderResourceUsageAMD*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ShaderResourceUsageAMD const& ) const = default;
+#else
bool operator==( ShaderResourceUsageAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( numUsedVgprs == rhs.numUsedVgprs )
@@ -58092,6 +76795,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
uint32_t numUsedVgprs = {};
@@ -58099,41 +76805,41 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t ldsSizePerLocalWorkGroup = {};
size_t ldsUsageSizeInBytes = {};
size_t scratchMemUsageInBytes = {};
+
};
static_assert( sizeof( ShaderResourceUsageAMD ) == sizeof( VkShaderResourceUsageAMD ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ShaderResourceUsageAMD>::value, "struct wrapper is not a standard layout!" );
struct ShaderStatisticsInfoAMD
{
- ShaderStatisticsInfoAMD( VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStageMask_ = {},
- VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD resourceUsage_ = {},
- uint32_t numPhysicalVgprs_ = {},
- uint32_t numPhysicalSgprs_ = {},
- uint32_t numAvailableVgprs_ = {},
- uint32_t numAvailableSgprs_ = {},
- std::array<uint32_t,3> const& computeWorkGroupSize_ = {} ) VULKAN_HPP_NOEXCEPT
- : shaderStageMask( shaderStageMask_ )
- , resourceUsage( resourceUsage_ )
- , numPhysicalVgprs( numPhysicalVgprs_ )
- , numPhysicalSgprs( numPhysicalSgprs_ )
- , numAvailableVgprs( numAvailableVgprs_ )
- , numAvailableSgprs( numAvailableSgprs_ )
- , computeWorkGroupSize{}
- {
- VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint32_t,3,3>::copy( computeWorkGroupSize, computeWorkGroupSize_ );
- }
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 ShaderStatisticsInfoAMD(VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStageMask_ = {}, VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD resourceUsage_ = {}, uint32_t numPhysicalVgprs_ = {}, uint32_t numPhysicalSgprs_ = {}, uint32_t numAvailableVgprs_ = {}, uint32_t numAvailableSgprs_ = {}, std::array<uint32_t,3> const& computeWorkGroupSize_ = {}) VULKAN_HPP_NOEXCEPT
+ : shaderStageMask( shaderStageMask_ ), resourceUsage( resourceUsage_ ), numPhysicalVgprs( numPhysicalVgprs_ ), numPhysicalSgprs( numPhysicalSgprs_ ), numAvailableVgprs( numAvailableVgprs_ ), numAvailableSgprs( numAvailableSgprs_ ), computeWorkGroupSize( computeWorkGroupSize_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR_14 ShaderStatisticsInfoAMD( ShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ShaderStatisticsInfoAMD( VkShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ ShaderStatisticsInfoAMD & operator=( VkShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderStatisticsInfoAMD const *>( &rhs );
+ return *this;
+ }
- ShaderStatisticsInfoAMD& operator=( VkShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+ ShaderStatisticsInfoAMD & operator=( ShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderStatisticsInfoAMD const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ShaderStatisticsInfoAMD ) );
return *this;
}
+
operator VkShaderStatisticsInfoAMD const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkShaderStatisticsInfoAMD*>( this );
@@ -58144,6 +76850,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkShaderStatisticsInfoAMD*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ShaderStatisticsInfoAMD const& ) const = default;
+#else
bool operator==( ShaderStatisticsInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( shaderStageMask == rhs.shaderStageMask )
@@ -58152,13 +76862,16 @@ namespace VULKAN_HPP_NAMESPACE
&& ( numPhysicalSgprs == rhs.numPhysicalSgprs )
&& ( numAvailableVgprs == rhs.numAvailableVgprs )
&& ( numAvailableSgprs == rhs.numAvailableSgprs )
- && ( memcmp( computeWorkGroupSize, rhs.computeWorkGroupSize, 3 * sizeof( uint32_t ) ) == 0 );
+ && ( computeWorkGroupSize == rhs.computeWorkGroupSize );
}
bool operator!=( ShaderStatisticsInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStageMask = {};
@@ -58167,34 +76880,43 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t numPhysicalSgprs = {};
uint32_t numAvailableVgprs = {};
uint32_t numAvailableSgprs = {};
- uint32_t computeWorkGroupSize[3] = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> computeWorkGroupSize = {};
+
};
static_assert( sizeof( ShaderStatisticsInfoAMD ) == sizeof( VkShaderStatisticsInfoAMD ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ShaderStatisticsInfoAMD>::value, "struct wrapper is not a standard layout!" );
struct SharedPresentSurfaceCapabilitiesKHR
{
- SharedPresentSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedPresentSupportedUsageFlags_ = {} ) VULKAN_HPP_NOEXCEPT
- : sharedPresentSupportedUsageFlags( sharedPresentSupportedUsageFlags_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSharedPresentSurfaceCapabilitiesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SharedPresentSurfaceCapabilitiesKHR(VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedPresentSupportedUsageFlags_ = {}) VULKAN_HPP_NOEXCEPT
+ : sharedPresentSupportedUsageFlags( sharedPresentSupportedUsageFlags_ )
{}
- VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR & operator=( VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR ) - offsetof( SharedPresentSurfaceCapabilitiesKHR, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR SharedPresentSurfaceCapabilitiesKHR( SharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SharedPresentSurfaceCapabilitiesKHR( VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ SharedPresentSurfaceCapabilitiesKHR & operator=( VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR const *>( &rhs );
+ return *this;
+ }
- SharedPresentSurfaceCapabilitiesKHR& operator=( VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ SharedPresentSurfaceCapabilitiesKHR & operator=( SharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( SharedPresentSurfaceCapabilitiesKHR ) );
return *this;
}
+
operator VkSharedPresentSurfaceCapabilitiesKHR const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSharedPresentSurfaceCapabilitiesKHR*>( this );
@@ -58205,6 +76927,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkSharedPresentSurfaceCapabilitiesKHR*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SharedPresentSurfaceCapabilitiesKHR const& ) const = default;
+#else
bool operator==( SharedPresentSurfaceCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -58216,255 +76942,53 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSharedPresentSurfaceCapabilitiesKHR;
void* pNext = {};
VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedPresentSupportedUsageFlags = {};
+
};
static_assert( sizeof( SharedPresentSurfaceCapabilitiesKHR ) == sizeof( VkSharedPresentSurfaceCapabilitiesKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SharedPresentSurfaceCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
- struct SparseImageFormatProperties
+ template <>
+ struct CppType<StructureType, StructureType::eSharedPresentSurfaceCapabilitiesKHR>
{
- SparseImageFormatProperties( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {},
- VULKAN_HPP_NAMESPACE::Extent3D imageGranularity_ = {},
- VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
- : aspectMask( aspectMask_ )
- , imageGranularity( imageGranularity_ )
- , flags( flags_ )
- {}
-
- SparseImageFormatProperties( VkSparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
-
- SparseImageFormatProperties& operator=( VkSparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties const *>(&rhs);
- return *this;
- }
-
- operator VkSparseImageFormatProperties const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkSparseImageFormatProperties*>( this );
- }
-
- operator VkSparseImageFormatProperties &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkSparseImageFormatProperties*>( this );
- }
-
- bool operator==( SparseImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( aspectMask == rhs.aspectMask )
- && ( imageGranularity == rhs.imageGranularity )
- && ( flags == rhs.flags );
- }
-
- bool operator!=( SparseImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-
- public:
- VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
- VULKAN_HPP_NAMESPACE::Extent3D imageGranularity = {};
- VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags = {};
+ using Type = SharedPresentSurfaceCapabilitiesKHR;
};
- static_assert( sizeof( SparseImageFormatProperties ) == sizeof( VkSparseImageFormatProperties ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<SparseImageFormatProperties>::value, "struct wrapper is not a standard layout!" );
-
- struct SparseImageFormatProperties2
- {
- SparseImageFormatProperties2( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties properties_ = {} ) VULKAN_HPP_NOEXCEPT
- : properties( properties_ )
- {}
-
- VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 & operator=( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 ) - offsetof( SparseImageFormatProperties2, pNext ) );
- return *this;
- }
-
- SparseImageFormatProperties2( VkSparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
-
- SparseImageFormatProperties2& operator=( VkSparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 const *>(&rhs);
- return *this;
- }
-
- operator VkSparseImageFormatProperties2 const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkSparseImageFormatProperties2*>( this );
- }
- operator VkSparseImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkSparseImageFormatProperties2*>( this );
- }
-
- bool operator==( SparseImageFormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( properties == rhs.properties );
- }
-
- bool operator!=( SparseImageFormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-
- public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSparseImageFormatProperties2;
- void* pNext = {};
- VULKAN_HPP_NAMESPACE::SparseImageFormatProperties properties = {};
- };
- static_assert( sizeof( SparseImageFormatProperties2 ) == sizeof( VkSparseImageFormatProperties2 ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<SparseImageFormatProperties2>::value, "struct wrapper is not a standard layout!" );
-
- struct SparseImageMemoryRequirements
+#ifdef VK_USE_PLATFORM_GGP
+ struct StreamDescriptorSurfaceCreateInfoGGP
{
- SparseImageMemoryRequirements( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties formatProperties_ = {},
- uint32_t imageMipTailFirstLod_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailSize_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailOffset_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailStride_ = {} ) VULKAN_HPP_NOEXCEPT
- : formatProperties( formatProperties_ )
- , imageMipTailFirstLod( imageMipTailFirstLod_ )
- , imageMipTailSize( imageMipTailSize_ )
- , imageMipTailOffset( imageMipTailOffset_ )
- , imageMipTailStride( imageMipTailStride_ )
- {}
-
- SparseImageMemoryRequirements( VkSparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
-
- SparseImageMemoryRequirements& operator=( VkSparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements const *>(&rhs);
- return *this;
- }
-
- operator VkSparseImageMemoryRequirements const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkSparseImageMemoryRequirements*>( this );
- }
-
- operator VkSparseImageMemoryRequirements &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkSparseImageMemoryRequirements*>( this );
- }
-
- bool operator==( SparseImageMemoryRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( formatProperties == rhs.formatProperties )
- && ( imageMipTailFirstLod == rhs.imageMipTailFirstLod )
- && ( imageMipTailSize == rhs.imageMipTailSize )
- && ( imageMipTailOffset == rhs.imageMipTailOffset )
- && ( imageMipTailStride == rhs.imageMipTailStride );
- }
-
- bool operator!=( SparseImageMemoryRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-
- public:
- VULKAN_HPP_NAMESPACE::SparseImageFormatProperties formatProperties = {};
- uint32_t imageMipTailFirstLod = {};
- VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailSize = {};
- VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailOffset = {};
- VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailStride = {};
- };
- static_assert( sizeof( SparseImageMemoryRequirements ) == sizeof( VkSparseImageMemoryRequirements ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<SparseImageMemoryRequirements>::value, "struct wrapper is not a standard layout!" );
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eStreamDescriptorSurfaceCreateInfoGGP;
- struct SparseImageMemoryRequirements2
- {
- SparseImageMemoryRequirements2( VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements memoryRequirements_ = {} ) VULKAN_HPP_NOEXCEPT
- : memoryRequirements( memoryRequirements_ )
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR StreamDescriptorSurfaceCreateInfoGGP(VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags_ = {}, GgpStreamDescriptor streamDescriptor_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), streamDescriptor( streamDescriptor_ )
{}
- VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 & operator=( VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 ) - offsetof( SparseImageMemoryRequirements2, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR StreamDescriptorSurfaceCreateInfoGGP( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- SparseImageMemoryRequirements2( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ StreamDescriptorSurfaceCreateInfoGGP( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- SparseImageMemoryRequirements2& operator=( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 const *>(&rhs);
- return *this;
- }
-
- operator VkSparseImageMemoryRequirements2 const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkSparseImageMemoryRequirements2*>( this );
- }
-
- operator VkSparseImageMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkSparseImageMemoryRequirements2*>( this );
- }
-
- bool operator==( SparseImageMemoryRequirements2 const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( memoryRequirements == rhs.memoryRequirements );
- }
-
- bool operator!=( SparseImageMemoryRequirements2 const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-
- public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSparseImageMemoryRequirements2;
- void* pNext = {};
- VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements memoryRequirements = {};
- };
- static_assert( sizeof( SparseImageMemoryRequirements2 ) == sizeof( VkSparseImageMemoryRequirements2 ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<SparseImageMemoryRequirements2>::value, "struct wrapper is not a standard layout!" );
-
-#ifdef VK_USE_PLATFORM_GGP
-
- struct StreamDescriptorSurfaceCreateInfoGGP
- {
- VULKAN_HPP_CONSTEXPR StreamDescriptorSurfaceCreateInfoGGP( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags_ = {},
- GgpStreamDescriptor streamDescriptor_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , streamDescriptor( streamDescriptor_ )
- {}
-
- VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & operator=( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT
+ StreamDescriptorSurfaceCreateInfoGGP & operator=( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP ) - offsetof( StreamDescriptorSurfaceCreateInfoGGP, pNext ) );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const *>( &rhs );
return *this;
}
- StreamDescriptorSurfaceCreateInfoGGP( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT
+ StreamDescriptorSurfaceCreateInfoGGP & operator=( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
- }
-
- StreamDescriptorSurfaceCreateInfoGGP& operator=( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( StreamDescriptorSurfaceCreateInfoGGP ) );
return *this;
}
@@ -58486,6 +77010,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkStreamDescriptorSurfaceCreateInfoGGP const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP*>( this );
@@ -58496,243 +77021,70 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkStreamDescriptorSurfaceCreateInfoGGP*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( StreamDescriptorSurfaceCreateInfoGGP const& ) const = default;
+#else
bool operator==( StreamDescriptorSurfaceCreateInfoGGP const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
- && ( streamDescriptor == rhs.streamDescriptor );
+ && ( memcmp( &streamDescriptor, &rhs.streamDescriptor, sizeof( GgpStreamDescriptor ) ) == 0 );
}
bool operator!=( StreamDescriptorSurfaceCreateInfoGGP const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eStreamDescriptorSurfaceCreateInfoGGP;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags = {};
GgpStreamDescriptor streamDescriptor = {};
+
};
static_assert( sizeof( StreamDescriptorSurfaceCreateInfoGGP ) == sizeof( VkStreamDescriptorSurfaceCreateInfoGGP ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<StreamDescriptorSurfaceCreateInfoGGP>::value, "struct wrapper is not a standard layout!" );
-#endif /*VK_USE_PLATFORM_GGP*/
- struct SubmitInfo
+ template <>
+ struct CppType<StructureType, StructureType::eStreamDescriptorSurfaceCreateInfoGGP>
{
- VULKAN_HPP_CONSTEXPR SubmitInfo( uint32_t waitSemaphoreCount_ = {},
- const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ = {},
- const VULKAN_HPP_NAMESPACE::PipelineStageFlags* pWaitDstStageMask_ = {},
- uint32_t commandBufferCount_ = {},
- const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers_ = {},
- uint32_t signalSemaphoreCount_ = {},
- const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores_ = {} ) VULKAN_HPP_NOEXCEPT
- : waitSemaphoreCount( waitSemaphoreCount_ )
- , pWaitSemaphores( pWaitSemaphores_ )
- , pWaitDstStageMask( pWaitDstStageMask_ )
- , commandBufferCount( commandBufferCount_ )
- , pCommandBuffers( pCommandBuffers_ )
- , signalSemaphoreCount( signalSemaphoreCount_ )
- , pSignalSemaphores( pSignalSemaphores_ )
- {}
-
- VULKAN_HPP_NAMESPACE::SubmitInfo & operator=( VULKAN_HPP_NAMESPACE::SubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SubmitInfo ) - offsetof( SubmitInfo, pNext ) );
- return *this;
- }
-
- SubmitInfo( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
-
- SubmitInfo& operator=( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubmitInfo const *>(&rhs);
- return *this;
- }
-
- SubmitInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
- SubmitInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
- {
- waitSemaphoreCount = waitSemaphoreCount_;
- return *this;
- }
-
- SubmitInfo & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT
- {
- pWaitSemaphores = pWaitSemaphores_;
- return *this;
- }
-
- SubmitInfo & setPWaitDstStageMask( const VULKAN_HPP_NAMESPACE::PipelineStageFlags* pWaitDstStageMask_ ) VULKAN_HPP_NOEXCEPT
- {
- pWaitDstStageMask = pWaitDstStageMask_;
- return *this;
- }
-
- SubmitInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT
- {
- commandBufferCount = commandBufferCount_;
- return *this;
- }
-
- SubmitInfo & setPCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers_ ) VULKAN_HPP_NOEXCEPT
- {
- pCommandBuffers = pCommandBuffers_;
- return *this;
- }
-
- SubmitInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
- {
- signalSemaphoreCount = signalSemaphoreCount_;
- return *this;
- }
-
- SubmitInfo & setPSignalSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores_ ) VULKAN_HPP_NOEXCEPT
- {
- pSignalSemaphores = pSignalSemaphores_;
- return *this;
- }
-
- operator VkSubmitInfo const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkSubmitInfo*>( this );
- }
-
- operator VkSubmitInfo &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkSubmitInfo*>( this );
- }
-
- bool operator==( SubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
- && ( pWaitSemaphores == rhs.pWaitSemaphores )
- && ( pWaitDstStageMask == rhs.pWaitDstStageMask )
- && ( commandBufferCount == rhs.commandBufferCount )
- && ( pCommandBuffers == rhs.pCommandBuffers )
- && ( signalSemaphoreCount == rhs.signalSemaphoreCount )
- && ( pSignalSemaphores == rhs.pSignalSemaphores );
- }
-
- bool operator!=( SubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-
- public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubmitInfo;
- const void* pNext = {};
- uint32_t waitSemaphoreCount = {};
- const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores = {};
- const VULKAN_HPP_NAMESPACE::PipelineStageFlags* pWaitDstStageMask = {};
- uint32_t commandBufferCount = {};
- const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers = {};
- uint32_t signalSemaphoreCount = {};
- const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores = {};
+ using Type = StreamDescriptorSurfaceCreateInfoGGP;
};
- static_assert( sizeof( SubmitInfo ) == sizeof( VkSubmitInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<SubmitInfo>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_GGP*/
- struct SubpassBeginInfo
+ struct SubpassDescriptionDepthStencilResolve
{
- VULKAN_HPP_CONSTEXPR SubpassBeginInfo( VULKAN_HPP_NAMESPACE::SubpassContents contents_ = VULKAN_HPP_NAMESPACE::SubpassContents::eInline ) VULKAN_HPP_NOEXCEPT
- : contents( contents_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassDescriptionDepthStencilResolve;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SubpassDescriptionDepthStencilResolve(VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone, VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone, const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilResolveAttachment_ = {}) VULKAN_HPP_NOEXCEPT
+ : depthResolveMode( depthResolveMode_ ), stencilResolveMode( stencilResolveMode_ ), pDepthStencilResolveAttachment( pDepthStencilResolveAttachment_ )
{}
- VULKAN_HPP_NAMESPACE::SubpassBeginInfo & operator=( VULKAN_HPP_NAMESPACE::SubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SubpassBeginInfo ) - offsetof( SubpassBeginInfo, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR SubpassDescriptionDepthStencilResolve( SubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- SubpassBeginInfo( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ SubpassDescriptionDepthStencilResolve( VkSubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- SubpassBeginInfo& operator=( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassBeginInfo const *>(&rhs);
- return *this;
- }
-
- SubpassBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
- SubpassBeginInfo & setContents( VULKAN_HPP_NAMESPACE::SubpassContents contents_ ) VULKAN_HPP_NOEXCEPT
- {
- contents = contents_;
- return *this;
- }
-
- operator VkSubpassBeginInfo const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkSubpassBeginInfo*>( this );
- }
-
- operator VkSubpassBeginInfo &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkSubpassBeginInfo*>( this );
- }
-
- bool operator==( SubpassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ SubpassDescriptionDepthStencilResolve & operator=( VkSubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT
{
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( contents == rhs.contents );
- }
-
- bool operator!=( SubpassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-
- public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassBeginInfo;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::SubpassContents contents = VULKAN_HPP_NAMESPACE::SubpassContents::eInline;
- };
- static_assert( sizeof( SubpassBeginInfo ) == sizeof( VkSubpassBeginInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<SubpassBeginInfo>::value, "struct wrapper is not a standard layout!" );
-
- struct SubpassDescriptionDepthStencilResolve
- {
- VULKAN_HPP_CONSTEXPR SubpassDescriptionDepthStencilResolve( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone,
- VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone,
- const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilResolveAttachment_ = {} ) VULKAN_HPP_NOEXCEPT
- : depthResolveMode( depthResolveMode_ )
- , stencilResolveMode( stencilResolveMode_ )
- , pDepthStencilResolveAttachment( pDepthStencilResolveAttachment_ )
- {}
-
- VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve & operator=( VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve ) - offsetof( SubpassDescriptionDepthStencilResolve, pNext ) );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve const *>( &rhs );
return *this;
}
- SubpassDescriptionDepthStencilResolve( VkSubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT
+ SubpassDescriptionDepthStencilResolve & operator=( SubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = rhs;
- }
-
- SubpassDescriptionDepthStencilResolve& operator=( VkSubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( SubpassDescriptionDepthStencilResolve ) );
return *this;
}
@@ -58760,6 +77112,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkSubpassDescriptionDepthStencilResolve const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSubpassDescriptionDepthStencilResolve*>( this );
@@ -58770,6 +77123,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkSubpassDescriptionDepthStencilResolve*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SubpassDescriptionDepthStencilResolve const& ) const = default;
+#else
bool operator==( SubpassDescriptionDepthStencilResolve const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -58783,6 +77140,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDescriptionDepthStencilResolve;
@@ -58790,313 +77150,46 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone;
VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone;
const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilResolveAttachment = {};
+
};
static_assert( sizeof( SubpassDescriptionDepthStencilResolve ) == sizeof( VkSubpassDescriptionDepthStencilResolve ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SubpassDescriptionDepthStencilResolve>::value, "struct wrapper is not a standard layout!" );
- struct SubpassEndInfo
+ template <>
+ struct CppType<StructureType, StructureType::eSubpassDescriptionDepthStencilResolve>
{
- VULKAN_HPP_CONSTEXPR SubpassEndInfo() VULKAN_HPP_NOEXCEPT
- {}
-
- VULKAN_HPP_NAMESPACE::SubpassEndInfo & operator=( VULKAN_HPP_NAMESPACE::SubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SubpassEndInfo ) - offsetof( SubpassEndInfo, pNext ) );
- return *this;
- }
-
- SubpassEndInfo( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
-
- SubpassEndInfo& operator=( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassEndInfo const *>(&rhs);
- return *this;
- }
-
- SubpassEndInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
- operator VkSubpassEndInfo const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkSubpassEndInfo*>( this );
- }
-
- operator VkSubpassEndInfo &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkSubpassEndInfo*>( this );
- }
-
- bool operator==( SubpassEndInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext );
- }
-
- bool operator!=( SubpassEndInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-
- public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassEndInfo;
- const void* pNext = {};
+ using Type = SubpassDescriptionDepthStencilResolve;
};
- static_assert( sizeof( SubpassEndInfo ) == sizeof( VkSubpassEndInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<SubpassEndInfo>::value, "struct wrapper is not a standard layout!" );
+ using SubpassDescriptionDepthStencilResolveKHR = SubpassDescriptionDepthStencilResolve;
- struct SurfaceCapabilities2EXT
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ struct SurfaceCapabilitiesFullScreenExclusiveEXT
{
- SurfaceCapabilities2EXT( uint32_t minImageCount_ = {},
- uint32_t maxImageCount_ = {},
- VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = {},
- VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = {},
- VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = {},
- uint32_t maxImageArrayLayers_ = {},
- VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {},
- VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity,
- VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {},
- VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {},
- VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters_ = {} ) VULKAN_HPP_NOEXCEPT
- : minImageCount( minImageCount_ )
- , maxImageCount( maxImageCount_ )
- , currentExtent( currentExtent_ )
- , minImageExtent( minImageExtent_ )
- , maxImageExtent( maxImageExtent_ )
- , maxImageArrayLayers( maxImageArrayLayers_ )
- , supportedTransforms( supportedTransforms_ )
- , currentTransform( currentTransform_ )
- , supportedCompositeAlpha( supportedCompositeAlpha_ )
- , supportedUsageFlags( supportedUsageFlags_ )
- , supportedSurfaceCounters( supportedSurfaceCounters_ )
- {}
-
- VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT & operator=( VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT ) - offsetof( SurfaceCapabilities2EXT, pNext ) );
- return *this;
- }
-
- SurfaceCapabilities2EXT( VkSurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT;
- SurfaceCapabilities2EXT& operator=( VkSurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT const *>(&rhs);
- return *this;
- }
-
- operator VkSurfaceCapabilities2EXT const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkSurfaceCapabilities2EXT*>( this );
- }
-
- operator VkSurfaceCapabilities2EXT &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkSurfaceCapabilities2EXT*>( this );
- }
-
- bool operator==( SurfaceCapabilities2EXT const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( minImageCount == rhs.minImageCount )
- && ( maxImageCount == rhs.maxImageCount )
- && ( currentExtent == rhs.currentExtent )
- && ( minImageExtent == rhs.minImageExtent )
- && ( maxImageExtent == rhs.maxImageExtent )
- && ( maxImageArrayLayers == rhs.maxImageArrayLayers )
- && ( supportedTransforms == rhs.supportedTransforms )
- && ( currentTransform == rhs.currentTransform )
- && ( supportedCompositeAlpha == rhs.supportedCompositeAlpha )
- && ( supportedUsageFlags == rhs.supportedUsageFlags )
- && ( supportedSurfaceCounters == rhs.supportedSurfaceCounters );
- }
-
- bool operator!=( SurfaceCapabilities2EXT const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-
- public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilities2EXT;
- void* pNext = {};
- uint32_t minImageCount = {};
- uint32_t maxImageCount = {};
- VULKAN_HPP_NAMESPACE::Extent2D currentExtent = {};
- VULKAN_HPP_NAMESPACE::Extent2D minImageExtent = {};
- VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent = {};
- uint32_t maxImageArrayLayers = {};
- VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {};
- VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
- VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha = {};
- VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags = {};
- VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters = {};
- };
- static_assert( sizeof( SurfaceCapabilities2EXT ) == sizeof( VkSurfaceCapabilities2EXT ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<SurfaceCapabilities2EXT>::value, "struct wrapper is not a standard layout!" );
-
- struct SurfaceCapabilitiesKHR
- {
- SurfaceCapabilitiesKHR( uint32_t minImageCount_ = {},
- uint32_t maxImageCount_ = {},
- VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = {},
- VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = {},
- VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = {},
- uint32_t maxImageArrayLayers_ = {},
- VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {},
- VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity,
- VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {},
- VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {} ) VULKAN_HPP_NOEXCEPT
- : minImageCount( minImageCount_ )
- , maxImageCount( maxImageCount_ )
- , currentExtent( currentExtent_ )
- , minImageExtent( minImageExtent_ )
- , maxImageExtent( maxImageExtent_ )
- , maxImageArrayLayers( maxImageArrayLayers_ )
- , supportedTransforms( supportedTransforms_ )
- , currentTransform( currentTransform_ )
- , supportedCompositeAlpha( supportedCompositeAlpha_ )
- , supportedUsageFlags( supportedUsageFlags_ )
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesFullScreenExclusiveEXT(VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported_ = {}) VULKAN_HPP_NOEXCEPT
+ : fullScreenExclusiveSupported( fullScreenExclusiveSupported_ )
{}
- SurfaceCapabilitiesKHR( VkSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
+ VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesFullScreenExclusiveEXT( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- SurfaceCapabilitiesKHR& operator=( VkSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR const *>(&rhs);
- return *this;
- }
-
- operator VkSurfaceCapabilitiesKHR const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkSurfaceCapabilitiesKHR*>( this );
- }
-
- operator VkSurfaceCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkSurfaceCapabilitiesKHR*>( this );
- }
-
- bool operator==( SurfaceCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( minImageCount == rhs.minImageCount )
- && ( maxImageCount == rhs.maxImageCount )
- && ( currentExtent == rhs.currentExtent )
- && ( minImageExtent == rhs.minImageExtent )
- && ( maxImageExtent == rhs.maxImageExtent )
- && ( maxImageArrayLayers == rhs.maxImageArrayLayers )
- && ( supportedTransforms == rhs.supportedTransforms )
- && ( currentTransform == rhs.currentTransform )
- && ( supportedCompositeAlpha == rhs.supportedCompositeAlpha )
- && ( supportedUsageFlags == rhs.supportedUsageFlags );
- }
-
- bool operator!=( SurfaceCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-
- public:
- uint32_t minImageCount = {};
- uint32_t maxImageCount = {};
- VULKAN_HPP_NAMESPACE::Extent2D currentExtent = {};
- VULKAN_HPP_NAMESPACE::Extent2D minImageExtent = {};
- VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent = {};
- uint32_t maxImageArrayLayers = {};
- VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {};
- VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
- VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha = {};
- VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags = {};
- };
- static_assert( sizeof( SurfaceCapabilitiesKHR ) == sizeof( VkSurfaceCapabilitiesKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<SurfaceCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
-
- struct SurfaceCapabilities2KHR
- {
- SurfaceCapabilities2KHR( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities_ = {} ) VULKAN_HPP_NOEXCEPT
- : surfaceCapabilities( surfaceCapabilities_ )
- {}
-
- VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR & operator=( VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR ) - offsetof( SurfaceCapabilities2KHR, pNext ) );
- return *this;
- }
-
- SurfaceCapabilities2KHR( VkSurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ SurfaceCapabilitiesFullScreenExclusiveEXT( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- SurfaceCapabilities2KHR& operator=( VkSurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ SurfaceCapabilitiesFullScreenExclusiveEXT & operator=( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT const *>( &rhs );
return *this;
}
- operator VkSurfaceCapabilities2KHR const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkSurfaceCapabilities2KHR*>( this );
- }
-
- operator VkSurfaceCapabilities2KHR &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkSurfaceCapabilities2KHR*>( this );
- }
-
- bool operator==( SurfaceCapabilities2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( surfaceCapabilities == rhs.surfaceCapabilities );
- }
-
- bool operator!=( SurfaceCapabilities2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ SurfaceCapabilitiesFullScreenExclusiveEXT & operator=( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- return !operator==( rhs );
- }
-
- public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilities2KHR;
- void* pNext = {};
- VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities = {};
- };
- static_assert( sizeof( SurfaceCapabilities2KHR ) == sizeof( VkSurfaceCapabilities2KHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<SurfaceCapabilities2KHR>::value, "struct wrapper is not a standard layout!" );
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
- struct SurfaceCapabilitiesFullScreenExclusiveEXT
- {
- VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesFullScreenExclusiveEXT( VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported_ = {} ) VULKAN_HPP_NOEXCEPT
- : fullScreenExclusiveSupported( fullScreenExclusiveSupported_ )
- {}
-
- VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT & operator=( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT ) - offsetof( SurfaceCapabilitiesFullScreenExclusiveEXT, pNext ) );
- return *this;
- }
-
- SurfaceCapabilitiesFullScreenExclusiveEXT( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
-
- SurfaceCapabilitiesFullScreenExclusiveEXT& operator=( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( SurfaceCapabilitiesFullScreenExclusiveEXT ) );
return *this;
}
@@ -59112,6 +77205,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkSurfaceCapabilitiesFullScreenExclusiveEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSurfaceCapabilitiesFullScreenExclusiveEXT*>( this );
@@ -59122,6 +77216,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkSurfaceCapabilitiesFullScreenExclusiveEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SurfaceCapabilitiesFullScreenExclusiveEXT const& ) const = default;
+#else
bool operator==( SurfaceCapabilitiesFullScreenExclusiveEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -59133,138 +77231,54 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported = {};
+
};
static_assert( sizeof( SurfaceCapabilitiesFullScreenExclusiveEXT ) == sizeof( VkSurfaceCapabilitiesFullScreenExclusiveEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SurfaceCapabilitiesFullScreenExclusiveEXT>::value, "struct wrapper is not a standard layout!" );
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- struct SurfaceFormatKHR
+ template <>
+ struct CppType<StructureType, StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT>
{
- SurfaceFormatKHR( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
- VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace_ = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear ) VULKAN_HPP_NOEXCEPT
- : format( format_ )
- , colorSpace( colorSpace_ )
- {}
-
- SurfaceFormatKHR( VkSurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
-
- SurfaceFormatKHR& operator=( VkSurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR const *>(&rhs);
- return *this;
- }
-
- operator VkSurfaceFormatKHR const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkSurfaceFormatKHR*>( this );
- }
-
- operator VkSurfaceFormatKHR &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkSurfaceFormatKHR*>( this );
- }
-
- bool operator==( SurfaceFormatKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( format == rhs.format )
- && ( colorSpace == rhs.colorSpace );
- }
-
- bool operator!=( SurfaceFormatKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-
- public:
- VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
- VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear;
+ using Type = SurfaceCapabilitiesFullScreenExclusiveEXT;
};
- static_assert( sizeof( SurfaceFormatKHR ) == sizeof( VkSurfaceFormatKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<SurfaceFormatKHR>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- struct SurfaceFormat2KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ struct SurfaceFullScreenExclusiveInfoEXT
{
- SurfaceFormat2KHR( VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat_ = {} ) VULKAN_HPP_NOEXCEPT
- : surfaceFormat( surfaceFormat_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceFullScreenExclusiveInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveInfoEXT(VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive_ = VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT::eDefault) VULKAN_HPP_NOEXCEPT
+ : fullScreenExclusive( fullScreenExclusive_ )
{}
- VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR & operator=( VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR ) - offsetof( SurfaceFormat2KHR, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveInfoEXT( SurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- SurfaceFormat2KHR( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ SurfaceFullScreenExclusiveInfoEXT( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- SurfaceFormat2KHR& operator=( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ SurfaceFullScreenExclusiveInfoEXT & operator=( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT const *>( &rhs );
return *this;
}
- operator VkSurfaceFormat2KHR const&() const VULKAN_HPP_NOEXCEPT
+ SurfaceFullScreenExclusiveInfoEXT & operator=( SurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkSurfaceFormat2KHR*>( this );
- }
-
- operator VkSurfaceFormat2KHR &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkSurfaceFormat2KHR*>( this );
- }
-
- bool operator==( SurfaceFormat2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( surfaceFormat == rhs.surfaceFormat );
- }
-
- bool operator!=( SurfaceFormat2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-
- public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFormat2KHR;
- void* pNext = {};
- VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat = {};
- };
- static_assert( sizeof( SurfaceFormat2KHR ) == sizeof( VkSurfaceFormat2KHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<SurfaceFormat2KHR>::value, "struct wrapper is not a standard layout!" );
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
- struct SurfaceFullScreenExclusiveInfoEXT
- {
- VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveInfoEXT( VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive_ = VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT::eDefault ) VULKAN_HPP_NOEXCEPT
- : fullScreenExclusive( fullScreenExclusive_ )
- {}
-
- VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT & operator=( VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT ) - offsetof( SurfaceFullScreenExclusiveInfoEXT, pNext ) );
- return *this;
- }
-
- SurfaceFullScreenExclusiveInfoEXT( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
-
- SurfaceFullScreenExclusiveInfoEXT& operator=( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( SurfaceFullScreenExclusiveInfoEXT ) );
return *this;
}
@@ -59280,6 +77294,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkSurfaceFullScreenExclusiveInfoEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSurfaceFullScreenExclusiveInfoEXT*>( this );
@@ -59290,6 +77305,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkSurfaceFullScreenExclusiveInfoEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SurfaceFullScreenExclusiveInfoEXT const& ) const = default;
+#else
bool operator==( SurfaceFullScreenExclusiveInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -59301,38 +77320,54 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFullScreenExclusiveInfoEXT;
void* pNext = {};
VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive = VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT::eDefault;
+
};
static_assert( sizeof( SurfaceFullScreenExclusiveInfoEXT ) == sizeof( VkSurfaceFullScreenExclusiveInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SurfaceFullScreenExclusiveInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eSurfaceFullScreenExclusiveInfoEXT>
+ {
+ using Type = SurfaceFullScreenExclusiveInfoEXT;
+ };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
-
struct SurfaceFullScreenExclusiveWin32InfoEXT
{
- VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveWin32InfoEXT( HMONITOR hmonitor_ = {} ) VULKAN_HPP_NOEXCEPT
- : hmonitor( hmonitor_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveWin32InfoEXT(HMONITOR hmonitor_ = {}) VULKAN_HPP_NOEXCEPT
+ : hmonitor( hmonitor_ )
{}
- VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT & operator=( VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT ) - offsetof( SurfaceFullScreenExclusiveWin32InfoEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveWin32InfoEXT( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SurfaceFullScreenExclusiveWin32InfoEXT( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- SurfaceFullScreenExclusiveWin32InfoEXT& operator=( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ SurfaceFullScreenExclusiveWin32InfoEXT & operator=( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT const *>( &rhs );
+ return *this;
+ }
+
+ SurfaceFullScreenExclusiveWin32InfoEXT & operator=( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( SurfaceFullScreenExclusiveWin32InfoEXT ) );
return *this;
}
@@ -59348,6 +77383,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkSurfaceFullScreenExclusiveWin32InfoEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSurfaceFullScreenExclusiveWin32InfoEXT*>( this );
@@ -59358,6 +77394,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkSurfaceFullScreenExclusiveWin32InfoEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SurfaceFullScreenExclusiveWin32InfoEXT const& ) const = default;
+#else
bool operator==( SurfaceFullScreenExclusiveWin32InfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -59369,36 +77409,53 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT;
const void* pNext = {};
HMONITOR hmonitor = {};
+
};
static_assert( sizeof( SurfaceFullScreenExclusiveWin32InfoEXT ) == sizeof( VkSurfaceFullScreenExclusiveWin32InfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SurfaceFullScreenExclusiveWin32InfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT>
+ {
+ using Type = SurfaceFullScreenExclusiveWin32InfoEXT;
+ };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
struct SurfaceProtectedCapabilitiesKHR
{
- VULKAN_HPP_CONSTEXPR SurfaceProtectedCapabilitiesKHR( VULKAN_HPP_NAMESPACE::Bool32 supportsProtected_ = {} ) VULKAN_HPP_NOEXCEPT
- : supportsProtected( supportsProtected_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceProtectedCapabilitiesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SurfaceProtectedCapabilitiesKHR(VULKAN_HPP_NAMESPACE::Bool32 supportsProtected_ = {}) VULKAN_HPP_NOEXCEPT
+ : supportsProtected( supportsProtected_ )
{}
- VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR & operator=( VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR ) - offsetof( SurfaceProtectedCapabilitiesKHR, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR SurfaceProtectedCapabilitiesKHR( SurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SurfaceProtectedCapabilitiesKHR( VkSurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ SurfaceProtectedCapabilitiesKHR & operator=( VkSurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR const *>( &rhs );
+ return *this;
+ }
- SurfaceProtectedCapabilitiesKHR& operator=( VkSurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ SurfaceProtectedCapabilitiesKHR & operator=( SurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( SurfaceProtectedCapabilitiesKHR ) );
return *this;
}
@@ -59414,6 +77471,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkSurfaceProtectedCapabilitiesKHR const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSurfaceProtectedCapabilitiesKHR*>( this );
@@ -59424,6 +77482,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkSurfaceProtectedCapabilitiesKHR*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SurfaceProtectedCapabilitiesKHR const& ) const = default;
+#else
bool operator==( SurfaceProtectedCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -59435,35 +77497,52 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceProtectedCapabilitiesKHR;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 supportsProtected = {};
+
};
static_assert( sizeof( SurfaceProtectedCapabilitiesKHR ) == sizeof( VkSurfaceProtectedCapabilitiesKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SurfaceProtectedCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eSurfaceProtectedCapabilitiesKHR>
+ {
+ using Type = SurfaceProtectedCapabilitiesKHR;
+ };
+
struct SwapchainCounterCreateInfoEXT
{
- VULKAN_HPP_CONSTEXPR SwapchainCounterCreateInfoEXT( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters_ = {} ) VULKAN_HPP_NOEXCEPT
- : surfaceCounters( surfaceCounters_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainCounterCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SwapchainCounterCreateInfoEXT(VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters_ = {}) VULKAN_HPP_NOEXCEPT
+ : surfaceCounters( surfaceCounters_ )
{}
- VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT ) - offsetof( SwapchainCounterCreateInfoEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR SwapchainCounterCreateInfoEXT( SwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SwapchainCounterCreateInfoEXT( VkSwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- SwapchainCounterCreateInfoEXT& operator=( VkSwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ SwapchainCounterCreateInfoEXT & operator=( VkSwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+ SwapchainCounterCreateInfoEXT & operator=( SwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( SwapchainCounterCreateInfoEXT ) );
return *this;
}
@@ -59479,6 +77558,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkSwapchainCounterCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSwapchainCounterCreateInfoEXT*>( this );
@@ -59489,6 +77569,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkSwapchainCounterCreateInfoEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SwapchainCounterCreateInfoEXT const& ) const = default;
+#else
bool operator==( SwapchainCounterCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -59500,250 +77584,52 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainCounterCreateInfoEXT;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters = {};
+
};
static_assert( sizeof( SwapchainCounterCreateInfoEXT ) == sizeof( VkSwapchainCounterCreateInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SwapchainCounterCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
- struct SwapchainCreateInfoKHR
+ template <>
+ struct CppType<StructureType, StructureType::eSwapchainCounterCreateInfoEXT>
{
- VULKAN_HPP_CONSTEXPR SwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_ = {},
- VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {},
- uint32_t minImageCount_ = {},
- VULKAN_HPP_NAMESPACE::Format imageFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
- VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear,
- VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = {},
- uint32_t imageArrayLayers_ = {},
- VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ = {},
- VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive,
- uint32_t queueFamilyIndexCount_ = {},
- const uint32_t* pQueueFamilyIndices_ = {},
- VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity,
- VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque,
- VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate,
- VULKAN_HPP_NAMESPACE::Bool32 clipped_ = {},
- VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , surface( surface_ )
- , minImageCount( minImageCount_ )
- , imageFormat( imageFormat_ )
- , imageColorSpace( imageColorSpace_ )
- , imageExtent( imageExtent_ )
- , imageArrayLayers( imageArrayLayers_ )
- , imageUsage( imageUsage_ )
- , imageSharingMode( imageSharingMode_ )
- , queueFamilyIndexCount( queueFamilyIndexCount_ )
- , pQueueFamilyIndices( pQueueFamilyIndices_ )
- , preTransform( preTransform_ )
- , compositeAlpha( compositeAlpha_ )
- , presentMode( presentMode_ )
- , clipped( clipped_ )
- , oldSwapchain( oldSwapchain_ )
- {}
-
- VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & operator=( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR ) - offsetof( SwapchainCreateInfoKHR, pNext ) );
- return *this;
- }
-
- SwapchainCreateInfoKHR( VkSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
-
- SwapchainCreateInfoKHR& operator=( VkSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const *>(&rhs);
- return *this;
- }
-
- SwapchainCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
- SwapchainCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
- {
- flags = flags_;
- return *this;
- }
-
- SwapchainCreateInfoKHR & setSurface( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT
- {
- surface = surface_;
- return *this;
- }
-
- SwapchainCreateInfoKHR & setMinImageCount( uint32_t minImageCount_ ) VULKAN_HPP_NOEXCEPT
- {
- minImageCount = minImageCount_;
- return *this;
- }
-
- SwapchainCreateInfoKHR & setImageFormat( VULKAN_HPP_NAMESPACE::Format imageFormat_ ) VULKAN_HPP_NOEXCEPT
- {
- imageFormat = imageFormat_;
- return *this;
- }
-
- SwapchainCreateInfoKHR & setImageColorSpace( VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ ) VULKAN_HPP_NOEXCEPT
- {
- imageColorSpace = imageColorSpace_;
- return *this;
- }
-
- SwapchainCreateInfoKHR & setImageExtent( VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ ) VULKAN_HPP_NOEXCEPT
- {
- imageExtent = imageExtent_;
- return *this;
- }
-
- SwapchainCreateInfoKHR & setImageArrayLayers( uint32_t imageArrayLayers_ ) VULKAN_HPP_NOEXCEPT
- {
- imageArrayLayers = imageArrayLayers_;
- return *this;
- }
-
- SwapchainCreateInfoKHR & setImageUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ ) VULKAN_HPP_NOEXCEPT
- {
- imageUsage = imageUsage_;
- return *this;
- }
-
- SwapchainCreateInfoKHR & setImageSharingMode( VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ ) VULKAN_HPP_NOEXCEPT
- {
- imageSharingMode = imageSharingMode_;
- return *this;
- }
-
- SwapchainCreateInfoKHR & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
- {
- queueFamilyIndexCount = queueFamilyIndexCount_;
- return *this;
- }
-
- SwapchainCreateInfoKHR & setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
- {
- pQueueFamilyIndices = pQueueFamilyIndices_;
- return *this;
- }
-
- SwapchainCreateInfoKHR & setPreTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ ) VULKAN_HPP_NOEXCEPT
- {
- preTransform = preTransform_;
- return *this;
- }
-
- SwapchainCreateInfoKHR & setCompositeAlpha( VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ ) VULKAN_HPP_NOEXCEPT
- {
- compositeAlpha = compositeAlpha_;
- return *this;
- }
-
- SwapchainCreateInfoKHR & setPresentMode( VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ ) VULKAN_HPP_NOEXCEPT
- {
- presentMode = presentMode_;
- return *this;
- }
-
- SwapchainCreateInfoKHR & setClipped( VULKAN_HPP_NAMESPACE::Bool32 clipped_ ) VULKAN_HPP_NOEXCEPT
- {
- clipped = clipped_;
- return *this;
- }
-
- SwapchainCreateInfoKHR & setOldSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ ) VULKAN_HPP_NOEXCEPT
- {
- oldSwapchain = oldSwapchain_;
- return *this;
- }
-
- operator VkSwapchainCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkSwapchainCreateInfoKHR*>( this );
- }
-
- operator VkSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkSwapchainCreateInfoKHR*>( this );
- }
-
- bool operator==( SwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( flags == rhs.flags )
- && ( surface == rhs.surface )
- && ( minImageCount == rhs.minImageCount )
- && ( imageFormat == rhs.imageFormat )
- && ( imageColorSpace == rhs.imageColorSpace )
- && ( imageExtent == rhs.imageExtent )
- && ( imageArrayLayers == rhs.imageArrayLayers )
- && ( imageUsage == rhs.imageUsage )
- && ( imageSharingMode == rhs.imageSharingMode )
- && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
- && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices )
- && ( preTransform == rhs.preTransform )
- && ( compositeAlpha == rhs.compositeAlpha )
- && ( presentMode == rhs.presentMode )
- && ( clipped == rhs.clipped )
- && ( oldSwapchain == rhs.oldSwapchain );
- }
-
- bool operator!=( SwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-
- public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainCreateInfoKHR;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags = {};
- VULKAN_HPP_NAMESPACE::SurfaceKHR surface = {};
- uint32_t minImageCount = {};
- VULKAN_HPP_NAMESPACE::Format imageFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
- VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear;
- VULKAN_HPP_NAMESPACE::Extent2D imageExtent = {};
- uint32_t imageArrayLayers = {};
- VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage = {};
- VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
- uint32_t queueFamilyIndexCount = {};
- const uint32_t* pQueueFamilyIndices = {};
- VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
- VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque;
- VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate;
- VULKAN_HPP_NAMESPACE::Bool32 clipped = {};
- VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain = {};
+ using Type = SwapchainCounterCreateInfoEXT;
};
- static_assert( sizeof( SwapchainCreateInfoKHR ) == sizeof( VkSwapchainCreateInfoKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<SwapchainCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
struct SwapchainDisplayNativeHdrCreateInfoAMD
{
- VULKAN_HPP_CONSTEXPR SwapchainDisplayNativeHdrCreateInfoAMD( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable_ = {} ) VULKAN_HPP_NOEXCEPT
- : localDimmingEnable( localDimmingEnable_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SwapchainDisplayNativeHdrCreateInfoAMD(VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable_ = {}) VULKAN_HPP_NOEXCEPT
+ : localDimmingEnable( localDimmingEnable_ )
{}
- VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD & operator=( VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD ) - offsetof( SwapchainDisplayNativeHdrCreateInfoAMD, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR SwapchainDisplayNativeHdrCreateInfoAMD( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SwapchainDisplayNativeHdrCreateInfoAMD( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ SwapchainDisplayNativeHdrCreateInfoAMD & operator=( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD const *>( &rhs );
+ return *this;
+ }
- SwapchainDisplayNativeHdrCreateInfoAMD& operator=( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+ SwapchainDisplayNativeHdrCreateInfoAMD & operator=( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( SwapchainDisplayNativeHdrCreateInfoAMD ) );
return *this;
}
@@ -59759,6 +77645,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkSwapchainDisplayNativeHdrCreateInfoAMD const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSwapchainDisplayNativeHdrCreateInfoAMD*>( this );
@@ -59769,6 +77656,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkSwapchainDisplayNativeHdrCreateInfoAMD*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SwapchainDisplayNativeHdrCreateInfoAMD const& ) const = default;
+#else
bool operator==( SwapchainDisplayNativeHdrCreateInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -59780,38 +77671,56 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable = {};
+
};
static_assert( sizeof( SwapchainDisplayNativeHdrCreateInfoAMD ) == sizeof( VkSwapchainDisplayNativeHdrCreateInfoAMD ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SwapchainDisplayNativeHdrCreateInfoAMD>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD>
+ {
+ using Type = SwapchainDisplayNativeHdrCreateInfoAMD;
+ };
+
struct TextureLODGatherFormatPropertiesAMD
{
- TextureLODGatherFormatPropertiesAMD( VULKAN_HPP_NAMESPACE::Bool32 supportsTextureGatherLODBiasAMD_ = {} ) VULKAN_HPP_NOEXCEPT
- : supportsTextureGatherLODBiasAMD( supportsTextureGatherLODBiasAMD_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTextureLodGatherFormatPropertiesAMD;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR TextureLODGatherFormatPropertiesAMD(VULKAN_HPP_NAMESPACE::Bool32 supportsTextureGatherLODBiasAMD_ = {}) VULKAN_HPP_NOEXCEPT
+ : supportsTextureGatherLODBiasAMD( supportsTextureGatherLODBiasAMD_ )
{}
- VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD & operator=( VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD ) - offsetof( TextureLODGatherFormatPropertiesAMD, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR TextureLODGatherFormatPropertiesAMD( TextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
TextureLODGatherFormatPropertiesAMD( VkTextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ TextureLODGatherFormatPropertiesAMD & operator=( VkTextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD const *>( &rhs );
+ return *this;
+ }
- TextureLODGatherFormatPropertiesAMD& operator=( VkTextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+ TextureLODGatherFormatPropertiesAMD & operator=( TextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( TextureLODGatherFormatPropertiesAMD ) );
return *this;
}
+
operator VkTextureLODGatherFormatPropertiesAMD const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkTextureLODGatherFormatPropertiesAMD*>( this );
@@ -59822,6 +77731,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkTextureLODGatherFormatPropertiesAMD*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( TextureLODGatherFormatPropertiesAMD const& ) const = default;
+#else
bool operator==( TextureLODGatherFormatPropertiesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -59833,41 +77746,58 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTextureLodGatherFormatPropertiesAMD;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 supportsTextureGatherLODBiasAMD = {};
+
};
static_assert( sizeof( TextureLODGatherFormatPropertiesAMD ) == sizeof( VkTextureLODGatherFormatPropertiesAMD ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<TextureLODGatherFormatPropertiesAMD>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eTextureLodGatherFormatPropertiesAMD>
+ {
+ using Type = TextureLODGatherFormatPropertiesAMD;
+ };
+
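
The CppType specializations introduced throughout this hunk map a StructureType enumerant back to its wrapper struct at compile time. Below is a minimal sketch of relying on that mapping, assuming the default VULKAN_HPP_NAMESPACE of vk; the alias name StructForTexLod and the static_assert are illustrative, not lines from the patch.

    #include <type_traits>
    #include <vulkan/vulkan.hpp>

    // The new trait resolves the wrapper type from the StructureType enumerant.
    using StructForTexLod =
        vk::CppType<vk::StructureType, vk::StructureType::eTextureLodGatherFormatPropertiesAMD>::Type;
    static_assert( std::is_same<StructForTexLod, vk::TextureLODGatherFormatPropertiesAMD>::value,
                   "CppType maps the enumerant back to its wrapper struct" );
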
struct TimelineSemaphoreSubmitInfo
{
- VULKAN_HPP_CONSTEXPR TimelineSemaphoreSubmitInfo( uint32_t waitSemaphoreValueCount_ = {},
- const uint64_t* pWaitSemaphoreValues_ = {},
- uint32_t signalSemaphoreValueCount_ = {},
- const uint64_t* pSignalSemaphoreValues_ = {} ) VULKAN_HPP_NOEXCEPT
- : waitSemaphoreValueCount( waitSemaphoreValueCount_ )
- , pWaitSemaphoreValues( pWaitSemaphoreValues_ )
- , signalSemaphoreValueCount( signalSemaphoreValueCount_ )
- , pSignalSemaphoreValues( pSignalSemaphoreValues_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTimelineSemaphoreSubmitInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR TimelineSemaphoreSubmitInfo(uint32_t waitSemaphoreValueCount_ = {}, const uint64_t* pWaitSemaphoreValues_ = {}, uint32_t signalSemaphoreValueCount_ = {}, const uint64_t* pSignalSemaphoreValues_ = {}) VULKAN_HPP_NOEXCEPT
+ : waitSemaphoreValueCount( waitSemaphoreValueCount_ ), pWaitSemaphoreValues( pWaitSemaphoreValues_ ), signalSemaphoreValueCount( signalSemaphoreValueCount_ ), pSignalSemaphoreValues( pSignalSemaphoreValues_ )
{}
- VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo & operator=( VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo ) - offsetof( TimelineSemaphoreSubmitInfo, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR TimelineSemaphoreSubmitInfo( TimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
TimelineSemaphoreSubmitInfo( VkTimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- TimelineSemaphoreSubmitInfo& operator=( VkTimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ TimelineSemaphoreSubmitInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ = {} )
+ : waitSemaphoreValueCount( static_cast<uint32_t>( waitSemaphoreValues_.size() ) ), pWaitSemaphoreValues( waitSemaphoreValues_.data() ), signalSemaphoreValueCount( static_cast<uint32_t>( signalSemaphoreValues_.size() ) ), pSignalSemaphoreValues( signalSemaphoreValues_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ TimelineSemaphoreSubmitInfo & operator=( VkTimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo const *>( &rhs );
+ return *this;
+ }
+
+ TimelineSemaphoreSubmitInfo & operator=( TimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( TimelineSemaphoreSubmitInfo ) );
return *this;
}
@@ -59889,6 +77819,15 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ TimelineSemaphoreSubmitInfo & setWaitSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
+ {
+ waitSemaphoreValueCount = static_cast<uint32_t>( waitSemaphoreValues_.size() );
+ pWaitSemaphoreValues = waitSemaphoreValues_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
TimelineSemaphoreSubmitInfo & setSignalSemaphoreValueCount( uint32_t signalSemaphoreValueCount_ ) VULKAN_HPP_NOEXCEPT
{
signalSemaphoreValueCount = signalSemaphoreValueCount_;
@@ -59901,6 +77840,16 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ TimelineSemaphoreSubmitInfo & setSignalSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
+ {
+ signalSemaphoreValueCount = static_cast<uint32_t>( signalSemaphoreValues_.size() );
+ pSignalSemaphoreValues = signalSemaphoreValues_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
operator VkTimelineSemaphoreSubmitInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkTimelineSemaphoreSubmitInfo*>( this );
@@ -59911,6 +77860,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkTimelineSemaphoreSubmitInfo*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( TimelineSemaphoreSubmitInfo const& ) const = default;
+#else
bool operator==( TimelineSemaphoreSubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -59925,6 +77878,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTimelineSemaphoreSubmitInfo;
@@ -59933,121 +77889,145 @@ namespace VULKAN_HPP_NAMESPACE
const uint64_t* pWaitSemaphoreValues = {};
uint32_t signalSemaphoreValueCount = {};
const uint64_t* pSignalSemaphoreValues = {};
+
};
static_assert( sizeof( TimelineSemaphoreSubmitInfo ) == sizeof( VkTimelineSemaphoreSubmitInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<TimelineSemaphoreSubmitInfo>::value, "struct wrapper is not a standard layout!" );
- struct ValidationCacheCreateInfoEXT
+ template <>
+ struct CppType<StructureType, StructureType::eTimelineSemaphoreSubmitInfo>
+ {
+ using Type = TimelineSemaphoreSubmitInfo;
+ };
+ using TimelineSemaphoreSubmitInfoKHR = TimelineSemaphoreSubmitInfo;
+
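
The new ArrayProxyNoTemporaries constructor and the setWaitSemaphoreValues / setSignalSemaphoreValues overloads derive both value counts from the array sizes, so a count can no longer drift apart from its pointer. A minimal sketch follows, assuming the default build (vk namespace, enhanced mode and struct constructors enabled); the helper and array names are placeholders, not upstream identifiers.

    #include <array>
    #include <vulkan/vulkan.hpp>

    // Hypothetical helper: chain timeline values into an existing SubmitInfo.
    void chainTimelineValues( vk::SubmitInfo & submitInfo )
    {
      // Static storage so the pointers captured by the struct stay valid after return.
      static std::array<uint64_t, 2> waitValues   = { 1, 2 };
      static std::array<uint64_t, 1> signalValues = { 3 };

      // Both counts are taken from the array sizes by the new ArrayProxyNoTemporaries constructor.
      static vk::TimelineSemaphoreSubmitInfo timelineInfo( waitValues, signalValues );
      submitInfo.setPNext( &timelineInfo );
    }
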
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ struct TraceRaysIndirectCommandKHR
{
- VULKAN_HPP_CONSTEXPR ValidationCacheCreateInfoEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_ = {},
- size_t initialDataSize_ = {},
- const void* pInitialData_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , initialDataSize( initialDataSize_ )
- , pInitialData( pInitialData_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommandKHR(uint32_t width_ = {}, uint32_t height_ = {}, uint32_t depth_ = {}) VULKAN_HPP_NOEXCEPT
+ : width( width_ ), height( height_ ), depth( depth_ )
{}
- VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT ) - offsetof( ValidationCacheCreateInfoEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommandKHR( TraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- ValidationCacheCreateInfoEXT( VkValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ TraceRaysIndirectCommandKHR( VkTraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- ValidationCacheCreateInfoEXT& operator=( VkValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ explicit TraceRaysIndirectCommandKHR( Extent2D const& extent2D, uint32_t depth_ = {} )
+ : width( extent2D.width )
+ , height( extent2D.height )
+ , depth( depth_ )
+ {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ TraceRaysIndirectCommandKHR & operator=( VkTraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommandKHR const *>( &rhs );
return *this;
}
- ValidationCacheCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ TraceRaysIndirectCommandKHR & operator=( TraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( TraceRaysIndirectCommandKHR ) );
return *this;
}
- ValidationCacheCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+ TraceRaysIndirectCommandKHR & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
{
- flags = flags_;
+ width = width_;
return *this;
}
- ValidationCacheCreateInfoEXT & setInitialDataSize( size_t initialDataSize_ ) VULKAN_HPP_NOEXCEPT
+ TraceRaysIndirectCommandKHR & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
{
- initialDataSize = initialDataSize_;
+ height = height_;
return *this;
}
- ValidationCacheCreateInfoEXT & setPInitialData( const void* pInitialData_ ) VULKAN_HPP_NOEXCEPT
+ TraceRaysIndirectCommandKHR & setDepth( uint32_t depth_ ) VULKAN_HPP_NOEXCEPT
{
- pInitialData = pInitialData_;
+ depth = depth_;
return *this;
}
- operator VkValidationCacheCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+
+ operator VkTraceRaysIndirectCommandKHR const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkValidationCacheCreateInfoEXT*>( this );
+ return *reinterpret_cast<const VkTraceRaysIndirectCommandKHR*>( this );
}
- operator VkValidationCacheCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkTraceRaysIndirectCommandKHR &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkValidationCacheCreateInfoEXT*>( this );
+ return *reinterpret_cast<VkTraceRaysIndirectCommandKHR*>( this );
}
- bool operator==( ValidationCacheCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( TraceRaysIndirectCommandKHR const& ) const = default;
+#else
+ bool operator==( TraceRaysIndirectCommandKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( flags == rhs.flags )
- && ( initialDataSize == rhs.initialDataSize )
- && ( pInitialData == rhs.pInitialData );
+ return ( width == rhs.width )
+ && ( height == rhs.height )
+ && ( depth == rhs.depth );
}
- bool operator!=( ValidationCacheCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( TraceRaysIndirectCommandKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationCacheCreateInfoEXT;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags = {};
- size_t initialDataSize = {};
- const void* pInitialData = {};
+ uint32_t width = {};
+ uint32_t height = {};
+ uint32_t depth = {};
+
};
- static_assert( sizeof( ValidationCacheCreateInfoEXT ) == sizeof( VkValidationCacheCreateInfoEXT ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ValidationCacheCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( TraceRaysIndirectCommandKHR ) == sizeof( VkTraceRaysIndirectCommandKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<TraceRaysIndirectCommandKHR>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
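
Besides the width/height/depth setters, TraceRaysIndirectCommandKHR gains an explicit constructor that copies its extent from an Extent2D. A small sketch, assuming struct constructors are enabled (the default); the helper name makeTraceCmd is not from the patch.

    #include <vulkan/vulkan.hpp>

    #ifdef VK_ENABLE_BETA_EXTENSIONS
    // Convenient when the trace dimensions should match a swapchain extent; one layer deep here.
    vk::TraceRaysIndirectCommandKHR makeTraceCmd( vk::Extent2D const & extent )
    {
      return vk::TraceRaysIndirectCommandKHR( extent, 1 );
    }
    #endif
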
struct ValidationFeaturesEXT
{
- VULKAN_HPP_CONSTEXPR ValidationFeaturesEXT( uint32_t enabledValidationFeatureCount_ = {},
- const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT* pEnabledValidationFeatures_ = {},
- uint32_t disabledValidationFeatureCount_ = {},
- const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT* pDisabledValidationFeatures_ = {} ) VULKAN_HPP_NOEXCEPT
- : enabledValidationFeatureCount( enabledValidationFeatureCount_ )
- , pEnabledValidationFeatures( pEnabledValidationFeatures_ )
- , disabledValidationFeatureCount( disabledValidationFeatureCount_ )
- , pDisabledValidationFeatures( pDisabledValidationFeatures_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ValidationFeaturesEXT(uint32_t enabledValidationFeatureCount_ = {}, const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT* pEnabledValidationFeatures_ = {}, uint32_t disabledValidationFeatureCount_ = {}, const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT* pDisabledValidationFeatures_ = {}) VULKAN_HPP_NOEXCEPT
+ : enabledValidationFeatureCount( enabledValidationFeatureCount_ ), pEnabledValidationFeatures( pEnabledValidationFeatures_ ), disabledValidationFeatureCount( disabledValidationFeatureCount_ ), pDisabledValidationFeatures( pDisabledValidationFeatures_ )
{}
- VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT ) - offsetof( ValidationFeaturesEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR ValidationFeaturesEXT( ValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ValidationFeaturesEXT( VkValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- ValidationFeaturesEXT& operator=( VkValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ ValidationFeaturesEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT> const & enabledValidationFeatures_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT> const & disabledValidationFeatures_ = {} )
+ : enabledValidationFeatureCount( static_cast<uint32_t>( enabledValidationFeatures_.size() ) ), pEnabledValidationFeatures( enabledValidationFeatures_.data() ), disabledValidationFeatureCount( static_cast<uint32_t>( disabledValidationFeatures_.size() ) ), pDisabledValidationFeatures( disabledValidationFeatures_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ ValidationFeaturesEXT & operator=( VkValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT const *>( &rhs );
+ return *this;
+ }
+
+ ValidationFeaturesEXT & operator=( ValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ValidationFeaturesEXT ) );
return *this;
}
@@ -60069,6 +78049,15 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ ValidationFeaturesEXT & setEnabledValidationFeatures( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT> const & enabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT
+ {
+ enabledValidationFeatureCount = static_cast<uint32_t>( enabledValidationFeatures_.size() );
+ pEnabledValidationFeatures = enabledValidationFeatures_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
ValidationFeaturesEXT & setDisabledValidationFeatureCount( uint32_t disabledValidationFeatureCount_ ) VULKAN_HPP_NOEXCEPT
{
disabledValidationFeatureCount = disabledValidationFeatureCount_;
@@ -60081,6 +78070,16 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ ValidationFeaturesEXT & setDisabledValidationFeatures( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT> const & disabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT
+ {
+ disabledValidationFeatureCount = static_cast<uint32_t>( disabledValidationFeatures_.size() );
+ pDisabledValidationFeatures = disabledValidationFeatures_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
operator VkValidationFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkValidationFeaturesEXT*>( this );
@@ -60091,6 +78090,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkValidationFeaturesEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ValidationFeaturesEXT const& ) const = default;
+#else
bool operator==( ValidationFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -60105,6 +78108,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationFeaturesEXT;
@@ -60113,32 +78119,50 @@ namespace VULKAN_HPP_NAMESPACE
const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT* pEnabledValidationFeatures = {};
uint32_t disabledValidationFeatureCount = {};
const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT* pDisabledValidationFeatures = {};
+
};
static_assert( sizeof( ValidationFeaturesEXT ) == sizeof( VkValidationFeaturesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ValidationFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+ template <>
+ struct CppType<StructureType, StructureType::eValidationFeaturesEXT>
+ {
+ using Type = ValidationFeaturesEXT;
+ };
+
struct ValidationFlagsEXT
{
- VULKAN_HPP_CONSTEXPR ValidationFlagsEXT( uint32_t disabledValidationCheckCount_ = {},
- const VULKAN_HPP_NAMESPACE::ValidationCheckEXT* pDisabledValidationChecks_ = {} ) VULKAN_HPP_NOEXCEPT
- : disabledValidationCheckCount( disabledValidationCheckCount_ )
- , pDisabledValidationChecks( pDisabledValidationChecks_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationFlagsEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ValidationFlagsEXT(uint32_t disabledValidationCheckCount_ = {}, const VULKAN_HPP_NAMESPACE::ValidationCheckEXT* pDisabledValidationChecks_ = {}) VULKAN_HPP_NOEXCEPT
+ : disabledValidationCheckCount( disabledValidationCheckCount_ ), pDisabledValidationChecks( pDisabledValidationChecks_ )
{}
- VULKAN_HPP_NAMESPACE::ValidationFlagsEXT & operator=( VULKAN_HPP_NAMESPACE::ValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ValidationFlagsEXT ) - offsetof( ValidationFlagsEXT, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR ValidationFlagsEXT( ValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ValidationFlagsEXT( VkValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- ValidationFlagsEXT& operator=( VkValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ ValidationFlagsEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationCheckEXT> const & disabledValidationChecks_ )
+ : disabledValidationCheckCount( static_cast<uint32_t>( disabledValidationChecks_.size() ) ), pDisabledValidationChecks( disabledValidationChecks_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ ValidationFlagsEXT & operator=( VkValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ValidationFlagsEXT const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ValidationFlagsEXT const *>( &rhs );
+ return *this;
+ }
+
+ ValidationFlagsEXT & operator=( ValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ValidationFlagsEXT ) );
return *this;
}
@@ -60160,6 +78184,16 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ ValidationFlagsEXT & setDisabledValidationChecks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationCheckEXT> const & disabledValidationChecks_ ) VULKAN_HPP_NOEXCEPT
+ {
+ disabledValidationCheckCount = static_cast<uint32_t>( disabledValidationChecks_.size() );
+ pDisabledValidationChecks = disabledValidationChecks_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
operator VkValidationFlagsEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkValidationFlagsEXT*>( this );
@@ -60170,6 +78204,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkValidationFlagsEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ValidationFlagsEXT const& ) const = default;
+#else
bool operator==( ValidationFlagsEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -60182,40 +78220,54 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationFlagsEXT;
const void* pNext = {};
uint32_t disabledValidationCheckCount = {};
const VULKAN_HPP_NAMESPACE::ValidationCheckEXT* pDisabledValidationChecks = {};
+
};
static_assert( sizeof( ValidationFlagsEXT ) == sizeof( VkValidationFlagsEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ValidationFlagsEXT>::value, "struct wrapper is not a standard layout!" );
-#ifdef VK_USE_PLATFORM_VI_NN
+ template <>
+ struct CppType<StructureType, StructureType::eValidationFlagsEXT>
+ {
+ using Type = ValidationFlagsEXT;
+ };
+#ifdef VK_USE_PLATFORM_VI_NN
struct ViSurfaceCreateInfoNN
{
- VULKAN_HPP_CONSTEXPR ViSurfaceCreateInfoNN( VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags_ = {},
- void* window_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , window( window_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eViSurfaceCreateInfoNN;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ViSurfaceCreateInfoNN(VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags_ = {}, void* window_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), window( window_ )
{}
- VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & operator=( VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN ) - offsetof( ViSurfaceCreateInfoNN, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR ViSurfaceCreateInfoNN( ViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ViSurfaceCreateInfoNN( VkViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- ViSurfaceCreateInfoNN& operator=( VkViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT
+ ViSurfaceCreateInfoNN & operator=( VkViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const *>( &rhs );
+ return *this;
+ }
+
+ ViSurfaceCreateInfoNN & operator=( ViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( ViSurfaceCreateInfoNN ) );
return *this;
}
@@ -60237,6 +78289,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkViSurfaceCreateInfoNN const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkViSurfaceCreateInfoNN*>( this );
@@ -60247,6 +78300,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkViSurfaceCreateInfoNN*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ViSurfaceCreateInfoNN const& ) const = default;
+#else
bool operator==( ViSurfaceCreateInfoNN const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -60259,43 +78316,55 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eViSurfaceCreateInfoNN;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags = {};
void* window = {};
+
};
static_assert( sizeof( ViSurfaceCreateInfoNN ) == sizeof( VkViSurfaceCreateInfoNN ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ViSurfaceCreateInfoNN>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eViSurfaceCreateInfoNN>
+ {
+ using Type = ViSurfaceCreateInfoNN;
+ };
#endif /*VK_USE_PLATFORM_VI_NN*/
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
-
struct WaylandSurfaceCreateInfoKHR
{
- VULKAN_HPP_CONSTEXPR WaylandSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags_ = {},
- struct wl_display* display_ = {},
- struct wl_surface* surface_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , display( display_ )
- , surface( surface_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWaylandSurfaceCreateInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR WaylandSurfaceCreateInfoKHR(VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags_ = {}, struct wl_display* display_ = {}, struct wl_surface* surface_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), display( display_ ), surface( surface_ )
{}
- VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & operator=( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR ) - offsetof( WaylandSurfaceCreateInfoKHR, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR WaylandSurfaceCreateInfoKHR( WaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
WaylandSurfaceCreateInfoKHR( VkWaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ WaylandSurfaceCreateInfoKHR & operator=( VkWaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const *>( &rhs );
+ return *this;
+ }
- WaylandSurfaceCreateInfoKHR& operator=( VkWaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ WaylandSurfaceCreateInfoKHR & operator=( WaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( WaylandSurfaceCreateInfoKHR ) );
return *this;
}
@@ -60323,6 +78392,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkWaylandSurfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR*>( this );
@@ -60333,6 +78403,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkWaylandSurfaceCreateInfoKHR*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( WaylandSurfaceCreateInfoKHR const& ) const = default;
+#else
bool operator==( WaylandSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -60346,6 +78420,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWaylandSurfaceCreateInfoKHR;
@@ -60353,45 +78430,80 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags = {};
struct wl_display* display = {};
struct wl_surface* surface = {};
+
};
static_assert( sizeof( WaylandSurfaceCreateInfoKHR ) == sizeof( VkWaylandSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<WaylandSurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eWaylandSurfaceCreateInfoKHR>
+ {
+ using Type = WaylandSurfaceCreateInfoKHR;
+ };
#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
-
struct Win32KeyedMutexAcquireReleaseInfoKHR
{
- VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoKHR( uint32_t acquireCount_ = {},
- const VULKAN_HPP_NAMESPACE::DeviceMemory* pAcquireSyncs_ = {},
- const uint64_t* pAcquireKeys_ = {},
- const uint32_t* pAcquireTimeouts_ = {},
- uint32_t releaseCount_ = {},
- const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs_ = {},
- const uint64_t* pReleaseKeys_ = {} ) VULKAN_HPP_NOEXCEPT
- : acquireCount( acquireCount_ )
- , pAcquireSyncs( pAcquireSyncs_ )
- , pAcquireKeys( pAcquireKeys_ )
- , pAcquireTimeouts( pAcquireTimeouts_ )
- , releaseCount( releaseCount_ )
- , pReleaseSyncs( pReleaseSyncs_ )
- , pReleaseKeys( pReleaseKeys_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoKHR(uint32_t acquireCount_ = {}, const VULKAN_HPP_NAMESPACE::DeviceMemory* pAcquireSyncs_ = {}, const uint64_t* pAcquireKeys_ = {}, const uint32_t* pAcquireTimeouts_ = {}, uint32_t releaseCount_ = {}, const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs_ = {}, const uint64_t* pReleaseKeys_ = {}) VULKAN_HPP_NOEXCEPT
+ : acquireCount( acquireCount_ ), pAcquireSyncs( pAcquireSyncs_ ), pAcquireKeys( pAcquireKeys_ ), pAcquireTimeouts( pAcquireTimeouts_ ), releaseCount( releaseCount_ ), pReleaseSyncs( pReleaseSyncs_ ), pReleaseKeys( pReleaseKeys_ )
{}
- VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR & operator=( VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR ) - offsetof( Win32KeyedMutexAcquireReleaseInfoKHR, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoKHR( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
Win32KeyedMutexAcquireReleaseInfoKHR( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- Win32KeyedMutexAcquireReleaseInfoKHR& operator=( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ Win32KeyedMutexAcquireReleaseInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & acquireSyncs_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & acquireKeys_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & acquireTimeouts_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & releaseSyncs_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & releaseKeys_ = {} )
+ : acquireCount( static_cast<uint32_t>( acquireSyncs_.size() ) ), pAcquireSyncs( acquireSyncs_.data() ), pAcquireKeys( acquireKeys_.data() ), pAcquireTimeouts( acquireTimeouts_.data() ), releaseCount( static_cast<uint32_t>( releaseSyncs_.size() ) ), pReleaseSyncs( releaseSyncs_.data() ), pReleaseKeys( releaseKeys_.data() )
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR const *>(&rhs);
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+ VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireKeys_.size() );
+ VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireTimeouts_.size() );
+ VULKAN_HPP_ASSERT( acquireKeys_.size() == acquireTimeouts_.size() );
+#else
+ if ( acquireSyncs_.size() != acquireKeys_.size() )
+ {
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: acquireSyncs_.size() != acquireKeys_.size()" );
+ }
+ if ( acquireSyncs_.size() != acquireTimeouts_.size() )
+ {
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: acquireSyncs_.size() != acquireTimeouts_.size()" );
+ }
+ if ( acquireKeys_.size() != acquireTimeouts_.size() )
+ {
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: acquireKeys_.size() != acquireTimeouts_.size()" );
+ }
+#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+ VULKAN_HPP_ASSERT( releaseSyncs_.size() == releaseKeys_.size() );
+#else
+ if ( releaseSyncs_.size() != releaseKeys_.size() )
+ {
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: releaseSyncs_.size() != releaseKeys_.size()" );
+ }
+#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ Win32KeyedMutexAcquireReleaseInfoKHR & operator=( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR const *>( &rhs );
+ return *this;
+ }
+
+ Win32KeyedMutexAcquireReleaseInfoKHR & operator=( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( Win32KeyedMutexAcquireReleaseInfoKHR ) );
return *this;
}
@@ -60413,18 +78525,45 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireSyncs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & acquireSyncs_ ) VULKAN_HPP_NOEXCEPT
+ {
+ acquireCount = static_cast<uint32_t>( acquireSyncs_.size() );
+ pAcquireSyncs = acquireSyncs_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireKeys( const uint64_t* pAcquireKeys_ ) VULKAN_HPP_NOEXCEPT
{
pAcquireKeys = pAcquireKeys_;
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireKeys( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & acquireKeys_ ) VULKAN_HPP_NOEXCEPT
+ {
+ acquireCount = static_cast<uint32_t>( acquireKeys_.size() );
+ pAcquireKeys = acquireKeys_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireTimeouts( const uint32_t* pAcquireTimeouts_ ) VULKAN_HPP_NOEXCEPT
{
pAcquireTimeouts = pAcquireTimeouts_;
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireTimeouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & acquireTimeouts_ ) VULKAN_HPP_NOEXCEPT
+ {
+ acquireCount = static_cast<uint32_t>( acquireTimeouts_.size() );
+ pAcquireTimeouts = acquireTimeouts_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
Win32KeyedMutexAcquireReleaseInfoKHR & setReleaseCount( uint32_t releaseCount_ ) VULKAN_HPP_NOEXCEPT
{
releaseCount = releaseCount_;
@@ -60437,12 +78576,31 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ Win32KeyedMutexAcquireReleaseInfoKHR & setReleaseSyncs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & releaseSyncs_ ) VULKAN_HPP_NOEXCEPT
+ {
+ releaseCount = static_cast<uint32_t>( releaseSyncs_.size() );
+ pReleaseSyncs = releaseSyncs_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
Win32KeyedMutexAcquireReleaseInfoKHR & setPReleaseKeys( const uint64_t* pReleaseKeys_ ) VULKAN_HPP_NOEXCEPT
{
pReleaseKeys = pReleaseKeys_;
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ Win32KeyedMutexAcquireReleaseInfoKHR & setReleaseKeys( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & releaseKeys_ ) VULKAN_HPP_NOEXCEPT
+ {
+ releaseCount = static_cast<uint32_t>( releaseKeys_.size() );
+ pReleaseKeys = releaseKeys_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
operator VkWin32KeyedMutexAcquireReleaseInfoKHR const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkWin32KeyedMutexAcquireReleaseInfoKHR*>( this );
@@ -60453,6 +78611,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkWin32KeyedMutexAcquireReleaseInfoKHR*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( Win32KeyedMutexAcquireReleaseInfoKHR const& ) const = default;
+#else
bool operator==( Win32KeyedMutexAcquireReleaseInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -60470,6 +78632,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR;
@@ -60481,45 +78646,80 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t releaseCount = {};
const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs = {};
const uint64_t* pReleaseKeys = {};
+
};
static_assert( sizeof( Win32KeyedMutexAcquireReleaseInfoKHR ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<Win32KeyedMutexAcquireReleaseInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR>
+ {
+ using Type = Win32KeyedMutexAcquireReleaseInfoKHR;
+ };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
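
The enhanced-mode constructor added above cross-checks the acquire and release array lengths, throwing vk::LogicError (or asserting when VULKAN_HPP_NO_EXCEPTIONS is defined) instead of leaving acquireCount inconsistent with the pointer arrays. A sketch of that behaviour, assuming a Win32, exception-enabled build; the function name and array sizes are placeholders.

    #ifdef VK_USE_PLATFORM_WIN32_KHR
    #include <array>
    #include <vulkan/vulkan.hpp>

    // Hypothetical check: mismatched acquire array lengths are rejected at construction time.
    bool rejectsMismatchedAcquireArrays()
    {
      std::array<vk::DeviceMemory, 2> syncs    = {};        // two sync objects ...
      std::array<uint64_t, 1>         keys     = { 0 };     // ... but only one key
      std::array<uint32_t, 1>         timeouts = { 100 };
      try
      {
        vk::Win32KeyedMutexAcquireReleaseInfoKHR info( syncs, keys, timeouts );
        (void) info;
      }
      catch ( vk::LogicError const & )
      {
        return true;   // "acquireSyncs_.size() != acquireKeys_.size()"
      }
      return false;
    }
    #endif
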
#ifdef VK_USE_PLATFORM_WIN32_KHR
-
struct Win32KeyedMutexAcquireReleaseInfoNV
{
- VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoNV( uint32_t acquireCount_ = {},
- const VULKAN_HPP_NAMESPACE::DeviceMemory* pAcquireSyncs_ = {},
- const uint64_t* pAcquireKeys_ = {},
- const uint32_t* pAcquireTimeoutMilliseconds_ = {},
- uint32_t releaseCount_ = {},
- const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs_ = {},
- const uint64_t* pReleaseKeys_ = {} ) VULKAN_HPP_NOEXCEPT
- : acquireCount( acquireCount_ )
- , pAcquireSyncs( pAcquireSyncs_ )
- , pAcquireKeys( pAcquireKeys_ )
- , pAcquireTimeoutMilliseconds( pAcquireTimeoutMilliseconds_ )
- , releaseCount( releaseCount_ )
- , pReleaseSyncs( pReleaseSyncs_ )
- , pReleaseKeys( pReleaseKeys_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWin32KeyedMutexAcquireReleaseInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoNV(uint32_t acquireCount_ = {}, const VULKAN_HPP_NAMESPACE::DeviceMemory* pAcquireSyncs_ = {}, const uint64_t* pAcquireKeys_ = {}, const uint32_t* pAcquireTimeoutMilliseconds_ = {}, uint32_t releaseCount_ = {}, const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs_ = {}, const uint64_t* pReleaseKeys_ = {}) VULKAN_HPP_NOEXCEPT
+ : acquireCount( acquireCount_ ), pAcquireSyncs( pAcquireSyncs_ ), pAcquireKeys( pAcquireKeys_ ), pAcquireTimeoutMilliseconds( pAcquireTimeoutMilliseconds_ ), releaseCount( releaseCount_ ), pReleaseSyncs( pReleaseSyncs_ ), pReleaseKeys( pReleaseKeys_ )
{}
- VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV & operator=( VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV ) - offsetof( Win32KeyedMutexAcquireReleaseInfoNV, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoNV( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
Win32KeyedMutexAcquireReleaseInfoNV( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- Win32KeyedMutexAcquireReleaseInfoNV& operator=( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ Win32KeyedMutexAcquireReleaseInfoNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & acquireSyncs_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & acquireKeys_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & acquireTimeoutMilliseconds_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & releaseSyncs_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & releaseKeys_ = {} )
+ : acquireCount( static_cast<uint32_t>( acquireSyncs_.size() ) ), pAcquireSyncs( acquireSyncs_.data() ), pAcquireKeys( acquireKeys_.data() ), pAcquireTimeoutMilliseconds( acquireTimeoutMilliseconds_.data() ), releaseCount( static_cast<uint32_t>( releaseSyncs_.size() ) ), pReleaseSyncs( releaseSyncs_.data() ), pReleaseKeys( releaseKeys_.data() )
+ {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+ VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireKeys_.size() );
+ VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireTimeoutMilliseconds_.size() );
+ VULKAN_HPP_ASSERT( acquireKeys_.size() == acquireTimeoutMilliseconds_.size() );
+#else
+ if ( acquireSyncs_.size() != acquireKeys_.size() )
+ {
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: acquireSyncs_.size() != acquireKeys_.size()" );
+ }
+ if ( acquireSyncs_.size() != acquireTimeoutMilliseconds_.size() )
+ {
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: acquireSyncs_.size() != acquireTimeoutMilliseconds_.size()" );
+ }
+ if ( acquireKeys_.size() != acquireTimeoutMilliseconds_.size() )
+ {
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: acquireKeys_.size() != acquireTimeoutMilliseconds_.size()" );
+ }
+#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+ VULKAN_HPP_ASSERT( releaseSyncs_.size() == releaseKeys_.size() );
+#else
+ if ( releaseSyncs_.size() != releaseKeys_.size() )
+ {
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: releaseSyncs_.size() != releaseKeys_.size()" );
+ }
+#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ Win32KeyedMutexAcquireReleaseInfoNV & operator=( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV const *>( &rhs );
+ return *this;
+ }
+
+ Win32KeyedMutexAcquireReleaseInfoNV & operator=( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( Win32KeyedMutexAcquireReleaseInfoNV ) );
return *this;
}
@@ -60541,18 +78741,45 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ Win32KeyedMutexAcquireReleaseInfoNV & setAcquireSyncs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & acquireSyncs_ ) VULKAN_HPP_NOEXCEPT
+ {
+ acquireCount = static_cast<uint32_t>( acquireSyncs_.size() );
+ pAcquireSyncs = acquireSyncs_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
Win32KeyedMutexAcquireReleaseInfoNV & setPAcquireKeys( const uint64_t* pAcquireKeys_ ) VULKAN_HPP_NOEXCEPT
{
pAcquireKeys = pAcquireKeys_;
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ Win32KeyedMutexAcquireReleaseInfoNV & setAcquireKeys( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & acquireKeys_ ) VULKAN_HPP_NOEXCEPT
+ {
+ acquireCount = static_cast<uint32_t>( acquireKeys_.size() );
+ pAcquireKeys = acquireKeys_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
Win32KeyedMutexAcquireReleaseInfoNV & setPAcquireTimeoutMilliseconds( const uint32_t* pAcquireTimeoutMilliseconds_ ) VULKAN_HPP_NOEXCEPT
{
pAcquireTimeoutMilliseconds = pAcquireTimeoutMilliseconds_;
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ Win32KeyedMutexAcquireReleaseInfoNV & setAcquireTimeoutMilliseconds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & acquireTimeoutMilliseconds_ ) VULKAN_HPP_NOEXCEPT
+ {
+ acquireCount = static_cast<uint32_t>( acquireTimeoutMilliseconds_.size() );
+ pAcquireTimeoutMilliseconds = acquireTimeoutMilliseconds_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
Win32KeyedMutexAcquireReleaseInfoNV & setReleaseCount( uint32_t releaseCount_ ) VULKAN_HPP_NOEXCEPT
{
releaseCount = releaseCount_;
@@ -60565,12 +78792,31 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ Win32KeyedMutexAcquireReleaseInfoNV & setReleaseSyncs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & releaseSyncs_ ) VULKAN_HPP_NOEXCEPT
+ {
+ releaseCount = static_cast<uint32_t>( releaseSyncs_.size() );
+ pReleaseSyncs = releaseSyncs_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
Win32KeyedMutexAcquireReleaseInfoNV & setPReleaseKeys( const uint64_t* pReleaseKeys_ ) VULKAN_HPP_NOEXCEPT
{
pReleaseKeys = pReleaseKeys_;
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ Win32KeyedMutexAcquireReleaseInfoNV & setReleaseKeys( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & releaseKeys_ ) VULKAN_HPP_NOEXCEPT
+ {
+ releaseCount = static_cast<uint32_t>( releaseKeys_.size() );
+ pReleaseKeys = releaseKeys_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
operator VkWin32KeyedMutexAcquireReleaseInfoNV const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkWin32KeyedMutexAcquireReleaseInfoNV*>( this );
@@ -60581,6 +78827,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkWin32KeyedMutexAcquireReleaseInfoNV*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( Win32KeyedMutexAcquireReleaseInfoNV const& ) const = default;
+#else
bool operator==( Win32KeyedMutexAcquireReleaseInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -60598,6 +78848,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32KeyedMutexAcquireReleaseInfoNV;
@@ -60609,37 +78862,46 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t releaseCount = {};
const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs = {};
const uint64_t* pReleaseKeys = {};
+
};
static_assert( sizeof( Win32KeyedMutexAcquireReleaseInfoNV ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<Win32KeyedMutexAcquireReleaseInfoNV>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eWin32KeyedMutexAcquireReleaseInfoNV>
+ {
+ using Type = Win32KeyedMutexAcquireReleaseInfoNV;
+ };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
-
struct Win32SurfaceCreateInfoKHR
{
- VULKAN_HPP_CONSTEXPR Win32SurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags_ = {},
- HINSTANCE hinstance_ = {},
- HWND hwnd_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , hinstance( hinstance_ )
- , hwnd( hwnd_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWin32SurfaceCreateInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR Win32SurfaceCreateInfoKHR(VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags_ = {}, HINSTANCE hinstance_ = {}, HWND hwnd_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), hinstance( hinstance_ ), hwnd( hwnd_ )
{}
- VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & operator=( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR ) - offsetof( Win32SurfaceCreateInfoKHR, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR Win32SurfaceCreateInfoKHR( Win32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
Win32SurfaceCreateInfoKHR( VkWin32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ Win32SurfaceCreateInfoKHR & operator=( VkWin32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const *>( &rhs );
+ return *this;
+ }
- Win32SurfaceCreateInfoKHR& operator=( VkWin32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ Win32SurfaceCreateInfoKHR & operator=( Win32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( Win32SurfaceCreateInfoKHR ) );
return *this;
}
@@ -60667,6 +78929,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkWin32SurfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkWin32SurfaceCreateInfoKHR*>( this );
@@ -60677,6 +78940,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkWin32SurfaceCreateInfoKHR*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( Win32SurfaceCreateInfoKHR const& ) const = default;
+#else
bool operator==( Win32SurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -60690,6 +78957,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32SurfaceCreateInfoKHR;
@@ -60697,243 +78967,163 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags = {};
HINSTANCE hinstance = {};
HWND hwnd = {};
+
};
static_assert( sizeof( Win32SurfaceCreateInfoKHR ) == sizeof( VkWin32SurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<Win32SurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eWin32SurfaceCreateInfoKHR>
+ {
+ using Type = Win32SurfaceCreateInfoKHR;
+ };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- struct WriteDescriptorSet
+ struct WriteDescriptorSetAccelerationStructureKHR
{
- VULKAN_HPP_CONSTEXPR WriteDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {},
- uint32_t dstBinding_ = {},
- uint32_t dstArrayElement_ = {},
- uint32_t descriptorCount_ = {},
- VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler,
- const VULKAN_HPP_NAMESPACE::DescriptorImageInfo* pImageInfo_ = {},
- const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo* pBufferInfo_ = {},
- const VULKAN_HPP_NAMESPACE::BufferView* pTexelBufferView_ = {} ) VULKAN_HPP_NOEXCEPT
- : dstSet( dstSet_ )
- , dstBinding( dstBinding_ )
- , dstArrayElement( dstArrayElement_ )
- , descriptorCount( descriptorCount_ )
- , descriptorType( descriptorType_ )
- , pImageInfo( pImageInfo_ )
- , pBufferInfo( pBufferInfo_ )
- , pTexelBufferView( pTexelBufferView_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSetAccelerationStructureKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureKHR(uint32_t accelerationStructureCount_ = {}, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures_ = {}) VULKAN_HPP_NOEXCEPT
+ : accelerationStructureCount( accelerationStructureCount_ ), pAccelerationStructures( pAccelerationStructures_ )
{}
- VULKAN_HPP_NAMESPACE::WriteDescriptorSet & operator=( VULKAN_HPP_NAMESPACE::WriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::WriteDescriptorSet ) - offsetof( WriteDescriptorSet, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureKHR( WriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- WriteDescriptorSet( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
+ WriteDescriptorSetAccelerationStructureKHR( VkWriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
- WriteDescriptorSet& operator=( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteDescriptorSet const *>(&rhs);
- return *this;
- }
-
- WriteDescriptorSet & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ WriteDescriptorSetAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures_ )
+ : accelerationStructureCount( static_cast<uint32_t>( accelerationStructures_.size() ) ), pAccelerationStructures( accelerationStructures_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- WriteDescriptorSet & setDstSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT
+ WriteDescriptorSetAccelerationStructureKHR & operator=( VkWriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- dstSet = dstSet_;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureKHR const *>( &rhs );
return *this;
}
- WriteDescriptorSet & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT
+ WriteDescriptorSetAccelerationStructureKHR & operator=( WriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- dstBinding = dstBinding_;
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( WriteDescriptorSetAccelerationStructureKHR ) );
return *this;
}
- WriteDescriptorSet & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT
+ WriteDescriptorSetAccelerationStructureKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- dstArrayElement = dstArrayElement_;
+ pNext = pNext_;
return *this;
}
- WriteDescriptorSet & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
+ WriteDescriptorSetAccelerationStructureKHR & setAccelerationStructureCount( uint32_t accelerationStructureCount_ ) VULKAN_HPP_NOEXCEPT
{
- descriptorCount = descriptorCount_;
+ accelerationStructureCount = accelerationStructureCount_;
return *this;
}
- WriteDescriptorSet & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
+ WriteDescriptorSetAccelerationStructureKHR & setPAccelerationStructures( const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures_ ) VULKAN_HPP_NOEXCEPT
{
- descriptorType = descriptorType_;
+ pAccelerationStructures = pAccelerationStructures_;
return *this;
}
- WriteDescriptorSet & setPImageInfo( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo* pImageInfo_ ) VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ WriteDescriptorSetAccelerationStructureKHR & setAccelerationStructures( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures_ ) VULKAN_HPP_NOEXCEPT
{
- pImageInfo = pImageInfo_;
+ accelerationStructureCount = static_cast<uint32_t>( accelerationStructures_.size() );
+ pAccelerationStructures = accelerationStructures_.data();
return *this;
}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
- WriteDescriptorSet & setPBufferInfo( const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo* pBufferInfo_ ) VULKAN_HPP_NOEXCEPT
- {
- pBufferInfo = pBufferInfo_;
- return *this;
- }
- WriteDescriptorSet & setPTexelBufferView( const VULKAN_HPP_NAMESPACE::BufferView* pTexelBufferView_ ) VULKAN_HPP_NOEXCEPT
+ operator VkWriteDescriptorSetAccelerationStructureKHR const&() const VULKAN_HPP_NOEXCEPT
{
- pTexelBufferView = pTexelBufferView_;
- return *this;
+ return *reinterpret_cast<const VkWriteDescriptorSetAccelerationStructureKHR*>( this );
}
- operator VkWriteDescriptorSet const&() const VULKAN_HPP_NOEXCEPT
+ operator VkWriteDescriptorSetAccelerationStructureKHR &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkWriteDescriptorSet*>( this );
+ return *reinterpret_cast<VkWriteDescriptorSetAccelerationStructureKHR*>( this );
}
- operator VkWriteDescriptorSet &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkWriteDescriptorSet*>( this );
- }
- bool operator==( WriteDescriptorSet const& rhs ) const VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( WriteDescriptorSetAccelerationStructureKHR const& ) const = default;
+#else
+ bool operator==( WriteDescriptorSetAccelerationStructureKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( dstSet == rhs.dstSet )
- && ( dstBinding == rhs.dstBinding )
- && ( dstArrayElement == rhs.dstArrayElement )
- && ( descriptorCount == rhs.descriptorCount )
- && ( descriptorType == rhs.descriptorType )
- && ( pImageInfo == rhs.pImageInfo )
- && ( pBufferInfo == rhs.pBufferInfo )
- && ( pTexelBufferView == rhs.pTexelBufferView );
+ && ( accelerationStructureCount == rhs.accelerationStructureCount )
+ && ( pAccelerationStructures == rhs.pAccelerationStructures );
}
- bool operator!=( WriteDescriptorSet const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( WriteDescriptorSetAccelerationStructureKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSet;
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetAccelerationStructureKHR;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::DescriptorSet dstSet = {};
- uint32_t dstBinding = {};
- uint32_t dstArrayElement = {};
- uint32_t descriptorCount = {};
- VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
- const VULKAN_HPP_NAMESPACE::DescriptorImageInfo* pImageInfo = {};
- const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo* pBufferInfo = {};
- const VULKAN_HPP_NAMESPACE::BufferView* pTexelBufferView = {};
+ uint32_t accelerationStructureCount = {};
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures = {};
+
};
- static_assert( sizeof( WriteDescriptorSet ) == sizeof( VkWriteDescriptorSet ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<WriteDescriptorSet>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( WriteDescriptorSetAccelerationStructureKHR ) == sizeof( VkWriteDescriptorSetAccelerationStructureKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<WriteDescriptorSetAccelerationStructureKHR>::value, "struct wrapper is not a standard layout!" );
- struct WriteDescriptorSetAccelerationStructureNV
+ template <>
+ struct CppType<StructureType, StructureType::eWriteDescriptorSetAccelerationStructureKHR>
{
- VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureNV( uint32_t accelerationStructureCount_ = {},
- const VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructures_ = {} ) VULKAN_HPP_NOEXCEPT
- : accelerationStructureCount( accelerationStructureCount_ )
- , pAccelerationStructures( pAccelerationStructures_ )
- {}
-
- VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV & operator=( VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV ) - offsetof( WriteDescriptorSetAccelerationStructureNV, pNext ) );
- return *this;
- }
-
- WriteDescriptorSetAccelerationStructureNV( VkWriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
-
- WriteDescriptorSetAccelerationStructureNV& operator=( VkWriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV const *>(&rhs);
- return *this;
- }
-
- WriteDescriptorSetAccelerationStructureNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
- WriteDescriptorSetAccelerationStructureNV & setAccelerationStructureCount( uint32_t accelerationStructureCount_ ) VULKAN_HPP_NOEXCEPT
- {
- accelerationStructureCount = accelerationStructureCount_;
- return *this;
- }
-
- WriteDescriptorSetAccelerationStructureNV & setPAccelerationStructures( const VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructures_ ) VULKAN_HPP_NOEXCEPT
- {
- pAccelerationStructures = pAccelerationStructures_;
- return *this;
- }
+ using Type = WriteDescriptorSetAccelerationStructureKHR;
+ };
+ using WriteDescriptorSetAccelerationStructureNV = WriteDescriptorSetAccelerationStructureKHR;
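Not part of the diff: a sketch of chaining the new WriteDescriptorSetAccelerationStructureKHR into an ordinary WriteDescriptorSet via pNext. The device, descriptor set and top-level acceleration structure are assumed to exist, as is the matching vk::DescriptorType::eAccelerationStructureKHR descriptor type from the same ray-tracing headers.

    // Usage sketch, not from the diff: bind a top-level acceleration structure.
    #include <vulkan/vulkan.hpp>

    void writeAccelerationStructure( vk::Device device,
                                     vk::DescriptorSet descriptorSet,
                                     vk::AccelerationStructureKHR tlas )
    {
      // The extension struct carries the handle; descriptorCount on the base write
      // must match accelerationStructureCount.
      vk::WriteDescriptorSetAccelerationStructureKHR asWrite( 1, &tlas );

      vk::WriteDescriptorSet write;
      write.setPNext( &asWrite )
           .setDstSet( descriptorSet )
           .setDstBinding( 0 )
           .setDescriptorCount( 1 )
           .setDescriptorType( vk::DescriptorType::eAccelerationStructureKHR );

      device.updateDescriptorSets( write, {} );   // one write, no copies
    }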
- operator VkWriteDescriptorSetAccelerationStructureNV const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkWriteDescriptorSetAccelerationStructureNV*>( this );
- }
+ struct WriteDescriptorSetInlineUniformBlockEXT
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSetInlineUniformBlockEXT;
- operator VkWriteDescriptorSetAccelerationStructureNV &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkWriteDescriptorSetAccelerationStructureNV*>( this );
- }
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR WriteDescriptorSetInlineUniformBlockEXT(uint32_t dataSize_ = {}, const void* pData_ = {}) VULKAN_HPP_NOEXCEPT
+ : dataSize( dataSize_ ), pData( pData_ )
+ {}
- bool operator==( WriteDescriptorSetAccelerationStructureNV const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( accelerationStructureCount == rhs.accelerationStructureCount )
- && ( pAccelerationStructures == rhs.pAccelerationStructures );
- }
+ VULKAN_HPP_CONSTEXPR WriteDescriptorSetInlineUniformBlockEXT( WriteDescriptorSetInlineUniformBlockEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- bool operator!=( WriteDescriptorSetAccelerationStructureNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ WriteDescriptorSetInlineUniformBlockEXT( VkWriteDescriptorSetInlineUniformBlockEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- return !operator==( rhs );
+ *this = rhs;
}
- public:
- const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetAccelerationStructureNV;
- const void* pNext = {};
- uint32_t accelerationStructureCount = {};
- const VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructures = {};
- };
- static_assert( sizeof( WriteDescriptorSetAccelerationStructureNV ) == sizeof( VkWriteDescriptorSetAccelerationStructureNV ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<WriteDescriptorSetAccelerationStructureNV>::value, "struct wrapper is not a standard layout!" );
-
- struct WriteDescriptorSetInlineUniformBlockEXT
- {
- VULKAN_HPP_CONSTEXPR WriteDescriptorSetInlineUniformBlockEXT( uint32_t dataSize_ = {},
- const void* pData_ = {} ) VULKAN_HPP_NOEXCEPT
- : dataSize( dataSize_ )
- , pData( pData_ )
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ template <typename T>
+ WriteDescriptorSetInlineUniformBlockEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ )
+ : dataSize( static_cast<uint32_t>( data_.size() * sizeof(T) ) ), pData( data_.data() )
{}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlockEXT & operator=( VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlockEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ WriteDescriptorSetInlineUniformBlockEXT & operator=( VkWriteDescriptorSetInlineUniformBlockEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlockEXT ) - offsetof( WriteDescriptorSetInlineUniformBlockEXT, pNext ) );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlockEXT const *>( &rhs );
return *this;
}
- WriteDescriptorSetInlineUniformBlockEXT( VkWriteDescriptorSetInlineUniformBlockEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- }
-
- WriteDescriptorSetInlineUniformBlockEXT& operator=( VkWriteDescriptorSetInlineUniformBlockEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ WriteDescriptorSetInlineUniformBlockEXT & operator=( WriteDescriptorSetInlineUniformBlockEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlockEXT const *>(&rhs);
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( WriteDescriptorSetInlineUniformBlockEXT ) );
return *this;
}
@@ -60955,6 +79145,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ template <typename T>
+ WriteDescriptorSetInlineUniformBlockEXT & setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dataSize = static_cast<uint32_t>( data_.size() * sizeof(T) );
+ pData = data_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
operator VkWriteDescriptorSetInlineUniformBlockEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkWriteDescriptorSetInlineUniformBlockEXT*>( this );
@@ -60965,6 +79166,10 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkWriteDescriptorSetInlineUniformBlockEXT*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( WriteDescriptorSetInlineUniformBlockEXT const& ) const = default;
+#else
bool operator==( WriteDescriptorSetInlineUniformBlockEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
@@ -60977,42 +79182,54 @@ namespace VULKAN_HPP_NAMESPACE
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetInlineUniformBlockEXT;
const void* pNext = {};
uint32_t dataSize = {};
const void* pData = {};
+
};
static_assert( sizeof( WriteDescriptorSetInlineUniformBlockEXT ) == sizeof( VkWriteDescriptorSetInlineUniformBlockEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<WriteDescriptorSetInlineUniformBlockEXT>::value, "struct wrapper is not a standard layout!" );
-#ifdef VK_USE_PLATFORM_XCB_KHR
+ template <>
+ struct CppType<StructureType, StructureType::eWriteDescriptorSetInlineUniformBlockEXT>
+ {
+ using Type = WriteDescriptorSetInlineUniformBlockEXT;
+ };
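Not part of the diff: a sketch of the new templated setData() on WriteDescriptorSetInlineUniformBlockEXT, which derives dataSize from the element type instead of taking a raw byte count. The SceneConstants struct is a hypothetical example; inline uniform block data must outlive the descriptor update and its size must be a multiple of 4.

    // Usage sketch, not from the diff: fill an inline uniform block write from typed data.
    #include <vulkan/vulkan.hpp>

    struct SceneConstants   // hypothetical shader constants, 8 bytes (multiple of 4)
    {
      float exposure;
      float gamma;
    };

    vk::WriteDescriptorSetInlineUniformBlockEXT makeInlineWrite( SceneConstants const & constants )
    {
      vk::WriteDescriptorSetInlineUniformBlockEXT inlineWrite;
      // setData() computes dataSize as count * sizeof(SceneConstants) and stores a
      // pointer to the caller's data, which must stay valid until the update is applied.
      inlineWrite.setData( vk::ArrayProxyNoTemporaries<const SceneConstants>( 1, &constants ) );
      return inlineWrite;
    }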
+#ifdef VK_USE_PLATFORM_XCB_KHR
struct XcbSurfaceCreateInfoKHR
{
- VULKAN_HPP_CONSTEXPR XcbSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags_ = {},
- xcb_connection_t* connection_ = {},
- xcb_window_t window_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , connection( connection_ )
- , window( window_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eXcbSurfaceCreateInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR XcbSurfaceCreateInfoKHR(VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags_ = {}, xcb_connection_t* connection_ = {}, xcb_window_t window_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), connection( connection_ ), window( window_ )
{}
- VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & operator=( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR ) - offsetof( XcbSurfaceCreateInfoKHR, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR XcbSurfaceCreateInfoKHR( XcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
XcbSurfaceCreateInfoKHR( VkXcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- XcbSurfaceCreateInfoKHR& operator=( VkXcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ XcbSurfaceCreateInfoKHR & operator=( VkXcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const *>( &rhs );
+ return *this;
+ }
+
+ XcbSurfaceCreateInfoKHR & operator=( XcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( XcbSurfaceCreateInfoKHR ) );
return *this;
}
@@ -61040,6 +79257,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkXcbSurfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkXcbSurfaceCreateInfoKHR*>( this );
@@ -61050,19 +79268,26 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkXcbSurfaceCreateInfoKHR*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( XcbSurfaceCreateInfoKHR const& ) const = default;
+#else
bool operator==( XcbSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( connection == rhs.connection )
- && ( window == rhs.window );
+ && ( memcmp( &window, &rhs.window, sizeof( xcb_window_t ) ) == 0 );
}
bool operator!=( XcbSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eXcbSurfaceCreateInfoKHR;
@@ -61070,37 +79295,46 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags = {};
xcb_connection_t* connection = {};
xcb_window_t window = {};
+
};
static_assert( sizeof( XcbSurfaceCreateInfoKHR ) == sizeof( VkXcbSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<XcbSurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eXcbSurfaceCreateInfoKHR>
+ {
+ using Type = XcbSurfaceCreateInfoKHR;
+ };
#endif /*VK_USE_PLATFORM_XCB_KHR*/
#ifdef VK_USE_PLATFORM_XLIB_KHR
-
struct XlibSurfaceCreateInfoKHR
{
- VULKAN_HPP_CONSTEXPR XlibSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags_ = {},
- Display* dpy_ = {},
- Window window_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , dpy( dpy_ )
- , window( window_ )
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eXlibSurfaceCreateInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR XlibSurfaceCreateInfoKHR(VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags_ = {}, Display* dpy_ = {}, Window window_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), dpy( dpy_ ), window( window_ )
{}
- VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & operator=( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR ) - offsetof( XlibSurfaceCreateInfoKHR, pNext ) );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR XlibSurfaceCreateInfoKHR( XlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
XlibSurfaceCreateInfoKHR( VkXlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- XlibSurfaceCreateInfoKHR& operator=( VkXlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ XlibSurfaceCreateInfoKHR & operator=( VkXlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const *>(&rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const *>( &rhs );
+ return *this;
+ }
+
+ XlibSurfaceCreateInfoKHR & operator=( XlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( XlibSurfaceCreateInfoKHR ) );
return *this;
}
@@ -61128,6 +79362,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+
operator VkXlibSurfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkXlibSurfaceCreateInfoKHR*>( this );
@@ -61138,19 +79373,26 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkXlibSurfaceCreateInfoKHR*>( this );
}
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( XlibSurfaceCreateInfoKHR const& ) const = default;
+#else
bool operator==( XlibSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( dpy == rhs.dpy )
- && ( window == rhs.window );
+ && ( memcmp( &window, &rhs.window, sizeof( Window ) ) == 0 );
}
bool operator!=( XlibSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
+#endif
+
+
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eXlibSurfaceCreateInfoKHR;
@@ -61158,19 +79400,668 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags = {};
Display* dpy = {};
Window window = {};
+
};
static_assert( sizeof( XlibSurfaceCreateInfoKHR ) == sizeof( VkXlibSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<XlibSurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eXlibSurfaceCreateInfoKHR>
+ {
+ using Type = XlibSurfaceCreateInfoKHR;
+ };
#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+ class DebugReportCallbackEXT
+ {
+ public:
+ using CType = VkDebugReportCallbackEXT;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDebugReportCallbackEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDebugReportCallbackEXT;
+
+ public:
+ VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT() VULKAN_HPP_NOEXCEPT
+ : m_debugReportCallbackEXT(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_debugReportCallbackEXT(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT DebugReportCallbackEXT( VkDebugReportCallbackEXT debugReportCallbackEXT ) VULKAN_HPP_NOEXCEPT
+ : m_debugReportCallbackEXT( debugReportCallbackEXT )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ DebugReportCallbackEXT & operator=(VkDebugReportCallbackEXT debugReportCallbackEXT) VULKAN_HPP_NOEXCEPT
+ {
+ m_debugReportCallbackEXT = debugReportCallbackEXT;
+ return *this;
+ }
+#endif
+
+ DebugReportCallbackEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ {
+ m_debugReportCallbackEXT = VK_NULL_HANDLE;
+ return *this;
+ }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DebugReportCallbackEXT const& ) const = default;
+#else
+ bool operator==( DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_debugReportCallbackEXT == rhs.m_debugReportCallbackEXT;
+ }
+
+ bool operator!=(DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_debugReportCallbackEXT != rhs.m_debugReportCallbackEXT;
+ }
+
+ bool operator<(DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_debugReportCallbackEXT < rhs.m_debugReportCallbackEXT;
+ }
+#endif
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDebugReportCallbackEXT() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_debugReportCallbackEXT;
+ }
+
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_debugReportCallbackEXT != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_debugReportCallbackEXT == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkDebugReportCallbackEXT m_debugReportCallbackEXT;
+ };
+ static_assert( sizeof( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT ) == sizeof( VkDebugReportCallbackEXT ), "handle and wrapper have different size!" );
+
+ template <>
+ struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eDebugReportCallbackEXT>
+ {
+ using type = VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT;
+ };
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDebugReportCallbackEXT>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT;
+ };
+
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDebugReportCallbackEXT>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT;
+ };
+
+
+ template <>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+ };
+
+ class DebugUtilsMessengerEXT
+ {
+ public:
+ using CType = VkDebugUtilsMessengerEXT;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDebugUtilsMessengerEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+
+ public:
+ VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT() VULKAN_HPP_NOEXCEPT
+ : m_debugUtilsMessengerEXT(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_debugUtilsMessengerEXT(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT DebugUtilsMessengerEXT( VkDebugUtilsMessengerEXT debugUtilsMessengerEXT ) VULKAN_HPP_NOEXCEPT
+ : m_debugUtilsMessengerEXT( debugUtilsMessengerEXT )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ DebugUtilsMessengerEXT & operator=(VkDebugUtilsMessengerEXT debugUtilsMessengerEXT) VULKAN_HPP_NOEXCEPT
+ {
+ m_debugUtilsMessengerEXT = debugUtilsMessengerEXT;
+ return *this;
+ }
+#endif
+
+ DebugUtilsMessengerEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ {
+ m_debugUtilsMessengerEXT = VK_NULL_HANDLE;
+ return *this;
+ }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DebugUtilsMessengerEXT const& ) const = default;
+#else
+ bool operator==( DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_debugUtilsMessengerEXT == rhs.m_debugUtilsMessengerEXT;
+ }
+
+ bool operator!=(DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_debugUtilsMessengerEXT != rhs.m_debugUtilsMessengerEXT;
+ }
+
+ bool operator<(DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_debugUtilsMessengerEXT < rhs.m_debugUtilsMessengerEXT;
+ }
+#endif
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDebugUtilsMessengerEXT() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_debugUtilsMessengerEXT;
+ }
+
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_debugUtilsMessengerEXT != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_debugUtilsMessengerEXT == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkDebugUtilsMessengerEXT m_debugUtilsMessengerEXT;
+ };
+ static_assert( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT ) == sizeof( VkDebugUtilsMessengerEXT ), "handle and wrapper have different size!" );
+
+ template <>
+ struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eDebugUtilsMessengerEXT>
+ {
+ using type = VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT;
+ };
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDebugUtilsMessengerEXT>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT;
+ };
+
+
+
+ template <>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+ };
+
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ class Instance;
+ template <typename Dispatch> class UniqueHandleTraits<DebugReportCallbackEXT, Dispatch> { public: using deleter = ObjectDestroy<Instance, Dispatch>; };
+ using UniqueDebugReportCallbackEXT = UniqueHandle<DebugReportCallbackEXT, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+ template <typename Dispatch> class UniqueHandleTraits<DebugUtilsMessengerEXT, Dispatch> { public: using deleter = ObjectDestroy<Instance, Dispatch>; };
+ using UniqueDebugUtilsMessengerEXT = UniqueHandle<DebugUtilsMessengerEXT, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+ template <typename Dispatch> class UniqueHandleTraits<SurfaceKHR, Dispatch> { public: using deleter = ObjectDestroy<Instance, Dispatch>; };
+ using UniqueSurfaceKHR = UniqueHandle<SurfaceKHR, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+
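Not part of the diff: a sketch of the new DebugUtilsMessengerEXT handle and the UniqueHandle deleter declared above, driven by Instance::createDebugUtilsMessengerEXTUnique from the class that follows. The callback body is a placeholder, and the default dispatcher is assumed to be able to resolve vkCreateDebugUtilsMessengerEXT (for example via the dynamic dispatch loader).

    // Usage sketch, not from the diff: install a debug messenger on an instance that
    // was created with the VK_EXT_debug_utils extension enabled.
    #include <vulkan/vulkan.hpp>
    #include <iostream>

    VKAPI_ATTR VkBool32 VKAPI_CALL debugCallback( VkDebugUtilsMessageSeverityFlagBitsEXT /*severity*/,
                                                  VkDebugUtilsMessageTypeFlagsEXT /*types*/,
                                                  VkDebugUtilsMessengerCallbackDataEXT const * pData,
                                                  void * /*pUserData*/ )
    {
      std::cerr << pData->pMessage << std::endl;   // placeholder handling
      return VK_FALSE;                             // never abort the triggering call
    }

    vk::UniqueDebugUtilsMessengerEXT installMessenger( vk::Instance instance )
    {
      vk::DebugUtilsMessengerCreateInfoEXT createInfo(
        {},
        vk::DebugUtilsMessageSeverityFlagBitsEXT::eWarning | vk::DebugUtilsMessageSeverityFlagBitsEXT::eError,
        vk::DebugUtilsMessageTypeFlagBitsEXT::eGeneral | vk::DebugUtilsMessageTypeFlagBitsEXT::eValidation,
        &debugCallback );
      // The returned UniqueDebugUtilsMessengerEXT destroys the messenger through
      // ObjectDestroy<Instance, Dispatch> when it goes out of scope.
      return instance.createDebugUtilsMessengerEXTUnique( createInfo );
    }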
+ class Instance
+ {
+ public:
+ using CType = VkInstance;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eInstance;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eInstance;
+
+ public:
+ VULKAN_HPP_CONSTEXPR Instance() VULKAN_HPP_NOEXCEPT
+ : m_instance(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR Instance( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_instance(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT Instance( VkInstance instance ) VULKAN_HPP_NOEXCEPT
+ : m_instance( instance )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ Instance & operator=(VkInstance instance) VULKAN_HPP_NOEXCEPT
+ {
+ m_instance = instance;
+ return *this;
+ }
+#endif
+
+ Instance & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ {
+ m_instance = VK_NULL_HANDLE;
+ return *this;
+ }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( Instance const& ) const = default;
+#else
+ bool operator==( Instance const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_instance == rhs.m_instance;
+ }
+
+ bool operator!=(Instance const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_instance != rhs.m_instance;
+ }
+
+ bool operator<(Instance const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_instance < rhs.m_instance;
+ }
+#endif
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createAndroidSurfaceKHRUnique( const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ Result createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT* pCallback, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>::type createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ typename ResultValueType<UniqueHandle<DebugReportCallbackEXT,Dispatch>>::type createDebugReportCallbackEXTUnique( const DebugReportCallbackCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ Result createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT* pMessenger, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>::type createDebugUtilsMessengerEXT( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ typename ResultValueType<UniqueHandle<DebugUtilsMessengerEXT,Dispatch>>::type createDebugUtilsMessengerEXTUnique( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_DIRECTFB_EXT
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createDirectFBSurfaceEXT( const DirectFBSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createDirectFBSurfaceEXTUnique( const DirectFBSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createDisplayPlaneSurfaceKHRUnique( const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createHeadlessSurfaceEXT( const HeadlessSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createHeadlessSurfaceEXTUnique( const HeadlessSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createIOSSurfaceMVKUnique( const IOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#ifdef VK_USE_PLATFORM_FUCHSIA
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createImagePipeSurfaceFUCHSIA( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createImagePipeSurfaceFUCHSIAUnique( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createMacOSSurfaceMVKUnique( const MacOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+#ifdef VK_USE_PLATFORM_METAL_EXT
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createMetalSurfaceEXT( const MetalSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createMetalSurfaceEXTUnique( const MetalSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+#ifdef VK_USE_PLATFORM_GGP
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createStreamDescriptorSurfaceGGP( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createStreamDescriptorSurfaceGGPUnique( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_GGP*/
+
+#ifdef VK_USE_PLATFORM_VI_NN
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createViSurfaceNN( const ViSurfaceCreateInfoNN & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createViSurfaceNNUnique( const ViSurfaceCreateInfoNN & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createWaylandSurfaceKHRUnique( const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createWin32SurfaceKHRUnique( const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createXcbSurfaceKHRUnique( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createXlibSurfaceKHRUnique( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const std::string & layerPrefix, const std::string & message, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result enumeratePhysicalDeviceGroups( uint32_t* pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator = std::allocator<PhysicalDeviceGroupProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type enumeratePhysicalDeviceGroups(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Allocator = std::allocator<PhysicalDeviceGroupProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceGroupProperties>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type enumeratePhysicalDeviceGroups(Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result enumeratePhysicalDeviceGroupsKHR( uint32_t* pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator = std::allocator<PhysicalDeviceGroupProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type enumeratePhysicalDeviceGroupsKHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Allocator = std::allocator<PhysicalDeviceGroupProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceGroupProperties>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type enumeratePhysicalDeviceGroupsKHR(Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result enumeratePhysicalDevices( uint32_t* pPhysicalDeviceCount, VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator = std::allocator<PhysicalDevice>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PhysicalDevice,Allocator>>::type enumeratePhysicalDevices(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template<typename Allocator = std::allocator<PhysicalDevice>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, PhysicalDevice>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PhysicalDevice,Allocator>>::type enumeratePhysicalDevices(Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ PFN_vkVoidFunction getProcAddr( const char* pName, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ PFN_vkVoidFunction getProcAddr( const std::string & name, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT* pCallbackData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const DebugUtilsMessengerCallbackDataEXT & callbackData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkInstance() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_instance;
+ }
+
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_instance != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_instance == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkInstance m_instance;
+ };
+ static_assert( sizeof( VULKAN_HPP_NAMESPACE::Instance ) == sizeof( VkInstance ), "handle and wrapper have different size!" );
+
+ template <>
+ struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eInstance>
+ {
+ using type = VULKAN_HPP_NAMESPACE::Instance;
+ };
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eInstance>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::Instance;
+ };
+
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eInstance>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::Instance;
+ };
+
+
+ template <>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Instance>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+ };
+
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch> class UniqueHandleTraits<Instance, Dispatch> { public: using deleter = ObjectDestroy<NoParent, Dispatch>; };
+ using UniqueInstance = UniqueHandle<Instance, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Instance* pInstance, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Instance>::type createInstance( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<Instance,Dispatch>>::type createInstanceUnique( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result enumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator = std::allocator<ExtensionProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
+ template<typename Allocator = std::allocator<ExtensionProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, ExtensionProperties>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName, Allocator const& vectorAllocator, Dispatch const &d );
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result enumerateInstanceLayerProperties( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator = std::allocator<LayerProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateInstanceLayerProperties(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
+ template<typename Allocator = std::allocator<LayerProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, LayerProperties>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateInstanceLayerProperties(Allocator const& vectorAllocator, Dispatch const &d );
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ Result enumerateInstanceVersion( uint32_t* pApiVersion, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ typename ResultValueType<uint32_t>::type enumerateInstanceVersion(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
template<typename Dispatch>
- VULKAN_HPP_INLINE Result createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Instance* pInstance, Dispatch const &d) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Instance* pInstance, Dispatch const &d) VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkInstance*>( pInstance ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Instance>::type createInstance( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d )
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Instance>::type createInstance( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d )
{
VULKAN_HPP_NAMESPACE::Instance instance;
Result result = static_cast<Result>( d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkInstance*>( &instance ) ) );
@@ -61178,7 +80069,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Instance,Dispatch>>::type createInstanceUnique( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d )
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Instance,Dispatch>>::type createInstanceUnique( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d )
{
VULKAN_HPP_NAMESPACE::Instance instance;
Result result = static_cast<Result>( d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkInstance*>( &instance ) ) );
@@ -61190,13 +80081,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result enumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties* pProperties, Dispatch const &d) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties* pProperties, Dispatch const &d) VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties*>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName, Dispatch const &d )
+ template<typename Allocator , typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName, Dispatch const &d )
{
std::vector<ExtensionProperties,Allocator> properties;
uint32_t propertyCount;
@@ -61217,8 +80108,8 @@ namespace VULKAN_HPP_NAMESPACE
}
return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::enumerateInstanceExtensionProperties" );
}
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName, Allocator const& vectorAllocator, Dispatch const &d )
+ template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, ExtensionProperties>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName, Allocator const& vectorAllocator, Dispatch const &d )
{
std::vector<ExtensionProperties,Allocator> properties( vectorAllocator );
uint32_t propertyCount;
@@ -61242,13 +80133,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result enumerateInstanceLayerProperties( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties* pProperties, Dispatch const &d) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceLayerProperties( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties* pProperties, Dispatch const &d) VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkEnumerateInstanceLayerProperties( pPropertyCount, reinterpret_cast<VkLayerProperties*>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateInstanceLayerProperties(Dispatch const &d )
+ template<typename Allocator , typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateInstanceLayerProperties(Dispatch const &d )
{
std::vector<LayerProperties,Allocator> properties;
uint32_t propertyCount;
@@ -61269,8 +80160,8 @@ namespace VULKAN_HPP_NAMESPACE
}
return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::enumerateInstanceLayerProperties" );
}
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateInstanceLayerProperties(Allocator const& vectorAllocator, Dispatch const &d )
+ template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, LayerProperties>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateInstanceLayerProperties(Allocator const& vectorAllocator, Dispatch const &d )
{
std::vector<LayerProperties,Allocator> properties( vectorAllocator );
uint32_t propertyCount;
@@ -61309,13 +80200,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo* pBeginInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo* pBeginInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo*>( pBeginInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::begin( const CommandBufferBeginInfo & beginInfo, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::begin( const CommandBufferBeginInfo & beginInfo, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo*>( &beginInfo ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::begin" );
@@ -61422,7 +80313,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> counterBuffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> counterBufferOffsets, Dispatch const &d ) const
+ VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const &counterBuffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const &counterBufferOffsets, Dispatch const &d ) const
{
#ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( counterBuffers.size() == counterBufferOffsets.size() );
@@ -61443,7 +80334,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets, ArrayProxy<const uint32_t> dynamicOffsets, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const &descriptorSets, ArrayProxy<const uint32_t> const &dynamicOffsets, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdBindDescriptorSets( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), firstSet, descriptorSets.size() , reinterpret_cast<const VkDescriptorSet*>( descriptorSets.data() ), dynamicOffsets.size() , dynamicOffsets.data() );
}
@@ -61479,6 +80370,20 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t groupIndex, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdBindPipelineShaderGroupNV( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ), groupIndex );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t groupIndex, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdBindPipelineShaderGroupNV( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ), groupIndex );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdBindShadingRateImageNV( m_commandBuffer, static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) );
@@ -61498,7 +80403,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> buffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> offsets, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> sizes, Dispatch const &d ) const
+ VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const &buffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const &offsets, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const &sizes, Dispatch const &d ) const
{
#ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
@@ -61535,7 +80440,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> buffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> offsets, Dispatch const &d ) const
+ VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const &buffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const &offsets, Dispatch const &d ) const
{
#ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
@@ -61550,28 +80455,140 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize* pSizes, const VULKAN_HPP_NAMESPACE::DeviceSize* pStrides, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdBindVertexBuffers2EXT( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast<const VkBuffer*>( pBuffers ), reinterpret_cast<const VkDeviceSize*>( pOffsets ), reinterpret_cast<const VkDeviceSize*>( pSizes ), reinterpret_cast<const VkDeviceSize*>( pStrides ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const &buffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const &offsets, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const &sizes, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const &strides, Dispatch const &d ) const
+ {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+ VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
+#else
+ if ( buffers.size() != offsets.size() )
+ {
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindVertexBuffers2EXT: buffers.size() != offsets.size()" );
+ }
+#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+ VULKAN_HPP_ASSERT( buffers.size() == sizes.size() );
+#else
+ if ( buffers.size() != sizes.size() )
+ {
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindVertexBuffers2EXT: buffers.size() != sizes.size()" );
+ }
+#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+ VULKAN_HPP_ASSERT( buffers.size() == strides.size() );
+#else
+ if ( buffers.size() != strides.size() )
+ {
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindVertexBuffers2EXT: buffers.size() != strides.size()" );
+ }
+#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+ VULKAN_HPP_ASSERT( offsets.size() == sizes.size() );
+#else
+ if ( offsets.size() != sizes.size() )
+ {
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindVertexBuffers2EXT: offsets.size() != sizes.size()" );
+ }
+#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+ VULKAN_HPP_ASSERT( offsets.size() == strides.size() );
+#else
+ if ( offsets.size() != strides.size() )
+ {
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindVertexBuffers2EXT: offsets.size() != strides.size()" );
+ }
+#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+ VULKAN_HPP_ASSERT( sizes.size() == strides.size() );
+#else
+ if ( sizes.size() != strides.size() )
+ {
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindVertexBuffers2EXT: sizes.size() != strides.size()" );
+ }
+#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+ d.vkCmdBindVertexBuffers2EXT( m_commandBuffer, firstBinding, buffers.size() , reinterpret_cast<const VkBuffer*>( buffers.data() ), reinterpret_cast<const VkDeviceSize*>( offsets.data() ), reinterpret_cast<const VkDeviceSize*>( sizes.data() ), reinterpret_cast<const VkDeviceSize*>( strides.data() ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageBlit* pRegions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdBlitImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageBlit*>( pRegions ), static_cast<VkFilter>( filter ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> regions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> const &regions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdBlitImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkImageBlit*>( regions.data() ), static_cast<VkFilter>( filter ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV* pInfo, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2KHR* pBlitImageInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, reinterpret_cast<const VkAccelerationStructureInfoNV*>( pInfo ), static_cast<VkBuffer>( instanceData ), static_cast<VkDeviceSize>( instanceOffset ), static_cast<VkBool32>( update ), static_cast<VkAccelerationStructureNV>( dst ), static_cast<VkAccelerationStructureNV>( src ), static_cast<VkBuffer>( scratch ), static_cast<VkDeviceSize>( scratchOffset ) );
+ d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2KHR*>( pBlitImageInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const AccelerationStructureInfoNV & info, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const BlitImageInfo2KHR & blitImageInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, reinterpret_cast<const VkAccelerationStructureInfoNV*>( &info ), static_cast<VkBuffer>( instanceData ), static_cast<VkDeviceSize>( instanceOffset ), static_cast<VkBool32>( update ), static_cast<VkAccelerationStructureNV>( dst ), static_cast<VkAccelerationStructureNV>( src ), static_cast<VkBuffer>( scratch ), static_cast<VkDeviceSize>( scratchOffset ) );
+ d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2KHR*>( &blitImageInfo ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureIndirectKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfo, VULKAN_HPP_NAMESPACE::Buffer indirectBuffer, VULKAN_HPP_NAMESPACE::DeviceSize indirectOffset, uint32_t indirectStride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdBuildAccelerationStructureIndirectKHR( m_commandBuffer, reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR*>( pInfo ), static_cast<VkBuffer>( indirectBuffer ), static_cast<VkDeviceSize>( indirectOffset ), indirectStride );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureIndirectKHR( const AccelerationStructureBuildGeometryInfoKHR & info, VULKAN_HPP_NAMESPACE::Buffer indirectBuffer, VULKAN_HPP_NAMESPACE::DeviceSize indirectOffset, uint32_t indirectStride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdBuildAccelerationStructureIndirectKHR( m_commandBuffer, reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR*>( &info ), static_cast<VkBuffer>( indirectBuffer ), static_cast<VkDeviceSize>( indirectOffset ), indirectStride );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureKHR( uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfos, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildOffsetInfoKHR* const * ppOffsetInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdBuildAccelerationStructureKHR( m_commandBuffer, infoCount, reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR*>( pInfos ), reinterpret_cast<const VkAccelerationStructureBuildOffsetInfoKHR* const*>( ppOffsetInfos ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const &infos, ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildOffsetInfoKHR* const > const &pOffsetInfos, Dispatch const &d ) const
+ {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+ VULKAN_HPP_ASSERT( infos.size() == pOffsetInfos.size() );
+#else
+ if ( infos.size() != pOffsetInfos.size() )
+ {
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::buildAccelerationStructureKHR: infos.size() != pOffsetInfos.size()" );
+ }
+#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+ d.vkCmdBuildAccelerationStructureKHR( m_commandBuffer, infos.size() , reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR*>( infos.data() ), reinterpret_cast<const VkAccelerationStructureBuildOffsetInfoKHR* const*>( pOffsetInfos.data() ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV* pInfo, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, reinterpret_cast<const VkAccelerationStructureInfoNV*>( pInfo ), static_cast<VkBuffer>( instanceData ), static_cast<VkDeviceSize>( instanceOffset ), static_cast<VkBool32>( update ), static_cast<VkAccelerationStructureKHR>( dst ), static_cast<VkAccelerationStructureKHR>( src ), static_cast<VkBuffer>( scratch ), static_cast<VkDeviceSize>( scratchOffset ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const AccelerationStructureInfoNV & info, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, reinterpret_cast<const VkAccelerationStructureInfoNV*>( &info ), static_cast<VkBuffer>( instanceData ), static_cast<VkDeviceSize>( instanceOffset ), static_cast<VkBool32>( update ), static_cast<VkAccelerationStructureKHR>( dst ), static_cast<VkAccelerationStructureKHR>( src ), static_cast<VkBuffer>( scratch ), static_cast<VkDeviceSize>( scratchOffset ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -61582,7 +80599,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> attachments, ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> rects, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> const &attachments, ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> const &rects, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdClearAttachments( m_commandBuffer, attachments.size() , reinterpret_cast<const VkClearAttachment*>( attachments.data() ), rects.size() , reinterpret_cast<const VkClearRect*>( rects.data() ) );
}
@@ -61595,7 +80612,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> ranges, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const &ranges, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdClearColorImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearColorValue*>( &color ), ranges.size() , reinterpret_cast<const VkImageSubresourceRange*>( ranges.data() ) );
}
@@ -61608,26 +80625,56 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> ranges, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const &ranges, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdClearDepthStencilImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearDepthStencilValue*>( &depthStencil ), ranges.size() , reinterpret_cast<const VkImageSubresourceRange*>( ranges.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureInfoKHR*>( pInfo ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const CopyAccelerationStructureInfoKHR & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureInfoKHR*>( &info ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeNV mode, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdCopyAccelerationStructureNV( m_commandBuffer, static_cast<VkAccelerationStructureNV>( dst ), static_cast<VkAccelerationStructureNV>( src ), static_cast<VkCopyAccelerationStructureModeNV>( mode ) );
+ d.vkCmdCopyAccelerationStructureNV( m_commandBuffer, static_cast<VkAccelerationStructureKHR>( dst ), static_cast<VkAccelerationStructureKHR>( src ), static_cast<VkCopyAccelerationStructureModeKHR>( mode ) );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeNV mode, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdCopyAccelerationStructureNV( m_commandBuffer, static_cast<VkAccelerationStructureNV>( dst ), static_cast<VkAccelerationStructureNV>( src ), static_cast<VkCopyAccelerationStructureModeNV>( mode ) );
+ d.vkCmdCopyAccelerationStructureNV( m_commandBuffer, static_cast<VkAccelerationStructureKHR>( dst ), static_cast<VkAccelerationStructureKHR>( src ), static_cast<VkCopyAccelerationStructureModeKHR>( mode ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdCopyAccelerationStructureToMemoryKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR*>( pInfo ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( const CopyAccelerationStructureToMemoryInfoKHR & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdCopyAccelerationStructureToMemoryKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR*>( &info ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
template<typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferCopy* pRegions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
@@ -61635,51 +80682,118 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> const &regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdCopyBuffer( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkBuffer>( dstBuffer ), regions.size() , reinterpret_cast<const VkBufferCopy*>( regions.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2KHR* pCopyBufferInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2KHR*>( pCopyBufferInfo ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const CopyBufferInfo2KHR & copyBufferInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2KHR*>( &copyBufferInfo ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdCopyBufferToImage( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkBufferImageCopy*>( pRegions ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const &regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdCopyBufferToImage( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkBufferImageCopy*>( regions.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2KHR* pCopyBufferToImageInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdCopyBufferToImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2KHR*>( pCopyBufferToImageInfo ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR( const CopyBufferToImageInfo2KHR & copyBufferToImageInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdCopyBufferToImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2KHR*>( &copyBufferToImageInfo ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageCopy* pRegions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdCopyImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageCopy*>( pRegions ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> const &regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdCopyImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkImageCopy*>( regions.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2KHR* pCopyImageInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2KHR*>( pCopyImageInfo ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const CopyImageInfo2KHR & copyImageInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2KHR*>( &copyImageInfo ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdCopyImageToBuffer( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkBuffer>( dstBuffer ), regionCount, reinterpret_cast<const VkBufferImageCopy*>( pRegions ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const &regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdCopyImageToBuffer( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkBuffer>( dstBuffer ), regions.size() , reinterpret_cast<const VkBufferImageCopy*>( regions.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2KHR* pCopyImageToBufferInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2KHR*>( pCopyImageToBufferInfo ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR( const CopyImageToBufferInfo2KHR & copyImageToBufferInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2KHR*>( &copyImageToBufferInfo ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdCopyMemoryToAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR*>( pInfo ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( const CopyMemoryToAccelerationStructureInfoKHR & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdCopyMemoryToAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR*>( &info ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
@@ -62089,7 +81203,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> counterBuffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> counterBufferOffsets, Dispatch const &d ) const
+ VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const &counterBuffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const &counterBufferOffsets, Dispatch const &d ) const
{
#ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( counterBuffers.size() == counterBufferOffsets.size() );
@@ -62110,12 +81224,25 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::executeCommands( ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::executeCommands( ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const &commandBuffers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdExecuteCommands( m_commandBuffer, commandBuffers.size() , reinterpret_cast<const VkCommandBuffer*>( commandBuffers.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV* pGeneratedCommandsInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdExecuteGeneratedCommandsNV( m_commandBuffer, static_cast<VkBool32>( isPreprocessed ), reinterpret_cast<const VkGeneratedCommandsInfoNV*>( pGeneratedCommandsInfo ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const GeneratedCommandsInfoNV & generatedCommandsInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdExecuteGeneratedCommandsNV( m_commandBuffer, static_cast<VkBool32>( isPreprocessed ), reinterpret_cast<const VkGeneratedCommandsInfoNV*>( &generatedCommandsInfo ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
@@ -62190,22 +81317,22 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> memoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> imageMemoryBarriers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const &memoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const &bufferMemoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const &imageMemoryBarriers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdPipelineBarrier( m_commandBuffer, static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), static_cast<VkDependencyFlags>( dependencyFlags ), memoryBarriers.size() , reinterpret_cast<const VkMemoryBarrier*>( memoryBarriers.data() ), bufferMemoryBarriers.size() , reinterpret_cast<const VkBufferMemoryBarrier*>( bufferMemoryBarriers.data() ), imageMemoryBarriers.size() , reinterpret_cast<const VkImageMemoryBarrier*>( imageMemoryBarriers.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::processCommandsNVX( const VULKAN_HPP_NAMESPACE::CmdProcessCommandsInfoNVX* pProcessCommandsInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV* pGeneratedCommandsInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdProcessCommandsNVX( m_commandBuffer, reinterpret_cast<const VkCmdProcessCommandsInfoNVX*>( pProcessCommandsInfo ) );
+ d.vkCmdPreprocessGeneratedCommandsNV( m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoNV*>( pGeneratedCommandsInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::processCommandsNVX( const CmdProcessCommandsInfoNVX & processCommandsInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( const GeneratedCommandsInfoNV & generatedCommandsInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdProcessCommandsNVX( m_commandBuffer, reinterpret_cast<const VkCmdProcessCommandsInfoNVX*>( &processCommandsInfo ) );
+ d.vkCmdPreprocessGeneratedCommandsNV( m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoNV*>( &generatedCommandsInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -62216,7 +81343,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename T, typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy<const T> values, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy<const T> const &values, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, values.size() * sizeof( T ) , reinterpret_cast<const void*>( values.data() ) );
}
@@ -62229,7 +81356,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> descriptorWrites, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const &descriptorWrites, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdPushDescriptorSetKHR( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), set, descriptorWrites.size() , reinterpret_cast<const VkWriteDescriptorSet*>( descriptorWrites.data() ) );
}
@@ -62249,19 +81376,6 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::reserveSpaceForCommandsNVX( const VULKAN_HPP_NAMESPACE::CmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
- {
- d.vkCmdReserveSpaceForCommandsNVX( m_commandBuffer, reinterpret_cast<const VkCmdReserveSpaceForCommandsInfoNVX*>( pReserveSpaceInfo ) );
- }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::reserveSpaceForCommandsNVX( const CmdReserveSpaceForCommandsInfoNVX & reserveSpaceInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
- {
- d.vkCmdReserveSpaceForCommandsNVX( m_commandBuffer, reinterpret_cast<const VkCmdReserveSpaceForCommandsInfoNVX*>( &reserveSpaceInfo ) );
- }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
@@ -62297,12 +81411,25 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> const &regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdResolveImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkImageResolve*>( regions.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2KHR* pResolveImageInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2KHR*>( pResolveImageInfo ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const ResolveImageInfo2KHR & resolveImageInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2KHR*>( &resolveImageInfo ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
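resolveImage2KHR follows the same VK_KHR_copy_commands2 pattern as copyImageToBuffer2KHR earlier in this section: every parameter moves into a single extensible *Info2KHR structure. A sketch of a whole-image color resolve, assuming the extension is enabled, the images are already in the layouts shown, and the entry point has been loaded into the DispatchLoaderDynamic that is passed in:

#include <vulkan/vulkan.hpp>

// Resolve a multisampled color image into a single-sample image with the
// copy_commands2 entry point; layouts and subresources are illustrative.
void resolveColor( vk::CommandBuffer cmd, vk::Image msaaImage, vk::Image resolvedImage,
                   vk::Extent3D extent, vk::DispatchLoaderDynamic const & dl )
{
  vk::ImageResolve2KHR region( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 },  // srcSubresource
                               {},                                            // srcOffset
                               { vk::ImageAspectFlagBits::eColor, 0, 0, 1 },  // dstSubresource
                               {},                                            // dstOffset
                               extent );
  vk::ResolveImageInfo2KHR info( msaaImage, vk::ImageLayout::eTransferSrcOptimal,
                                 resolvedImage, vk::ImageLayout::eTransferDstOptimal,
                                 1, &region );
  cmd.resolveImage2KHR( info, dl );
}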
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4], Dispatch const &d) const VULKAN_HPP_NOEXCEPT
@@ -62338,7 +81465,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, ArrayProxy<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> customSampleOrders, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, ArrayProxy<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const &customSampleOrders, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ), customSampleOrders.size() , reinterpret_cast<const VkCoarseSampleOrderCustomNV*>( customSampleOrders.data() ) );
}
@@ -62346,6 +81473,20 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdSetCullModeEXT( m_commandBuffer, static_cast<VkCullModeFlags>( cullMode ) );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdSetCullModeEXT( m_commandBuffer, static_cast<VkCullModeFlags>( cullMode ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
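setCullModeEXT, like the other single-value state setters added through the rest of this section (setFrontFaceEXT, setPrimitiveTopologyEXT, the setDepth*EXT and setStencil*EXT toggles, and the *WithCountEXT commands), comes from VK_EXT_extended_dynamic_state; the identical bodies in both preprocessor branches simply keep these one-argument wrappers available whether or not enhanced mode is compiled in. A sketch of per-draw state changes without pipeline rebuilds; the enabled extension, a pipeline declaring the matching VK_DYNAMIC_STATE_*_EXT states, and a populated DispatchLoaderDynamic are assumptions:

#include <vulkan/vulkan.hpp>

// Adjust rasterization and depth state dynamically between draws.
void setDynamicRasterState( vk::CommandBuffer cmd, vk::DispatchLoaderDynamic const & dl )
{
  cmd.setCullModeEXT( vk::CullModeFlagBits::eBack, dl );
  cmd.setFrontFaceEXT( vk::FrontFace::eCounterClockwise, dl );
  cmd.setPrimitiveTopologyEXT( vk::PrimitiveTopology::eTriangleList, dl );
  cmd.setDepthTestEnableEXT( VK_TRUE, dl );
  cmd.setDepthWriteEnableEXT( VK_TRUE, dl );
  cmd.setDepthCompareOpEXT( vk::CompareOp::eLessOrEqual, dl );
}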
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
@@ -62374,6 +81515,62 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdSetDepthBoundsTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthBoundsTestEnable ) );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdSetDepthBoundsTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthBoundsTestEnable ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdSetDepthCompareOpEXT( m_commandBuffer, static_cast<VkCompareOp>( depthCompareOp ) );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdSetDepthCompareOpEXT( m_commandBuffer, static_cast<VkCompareOp>( depthCompareOp ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdSetDepthTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthTestEnable ) );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdSetDepthTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthTestEnable ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdSetDepthWriteEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthWriteEnable ) );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdSetDepthWriteEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthWriteEnable ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdSetDeviceMask( m_commandBuffer, deviceMask );
@@ -62407,7 +81604,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> discardRectangles, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const &discardRectangles, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangles.size() , reinterpret_cast<const VkRect2D*>( discardRectangles.data() ) );
}
@@ -62434,7 +81631,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> exclusiveScissors, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const &exclusiveScissors, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdSetExclusiveScissorNV( m_commandBuffer, firstExclusiveScissor, exclusiveScissors.size() , reinterpret_cast<const VkRect2D*>( exclusiveScissors.data() ) );
}
@@ -62442,6 +81639,20 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdSetFrontFaceEXT( m_commandBuffer, static_cast<VkFrontFace>( frontFace ) );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdSetFrontFaceEXT( m_commandBuffer, static_cast<VkFrontFace>( frontFace ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdSetLineStippleEXT( m_commandBuffer, lineStippleFactor, lineStipplePattern );
@@ -62469,13 +81680,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL* pMarkerInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL* pMarkerInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL*>( pMarkerInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::setPerformanceMarkerINTEL( const PerformanceMarkerInfoINTEL & markerInfo, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::setPerformanceMarkerINTEL( const PerformanceMarkerInfoINTEL & markerInfo, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL*>( &markerInfo ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::setPerformanceMarkerINTEL" );
@@ -62483,13 +81694,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL* pOverrideInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL* pOverrideInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL*>( pOverrideInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::setPerformanceOverrideINTEL( const PerformanceOverrideInfoINTEL & overrideInfo, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::setPerformanceOverrideINTEL( const PerformanceOverrideInfoINTEL & overrideInfo, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL*>( &overrideInfo ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::setPerformanceOverrideINTEL" );
@@ -62497,19 +81708,33 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL* pMarkerInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL* pMarkerInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL*>( pMarkerInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::setPerformanceStreamMarkerINTEL( const PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::setPerformanceStreamMarkerINTEL( const PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL*>( &markerInfo ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::setPerformanceStreamMarkerINTEL" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdSetPrimitiveTopologyEXT( m_commandBuffer, static_cast<VkPrimitiveTopology>( primitiveTopology ) );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdSetPrimitiveTopologyEXT( m_commandBuffer, static_cast<VkPrimitiveTopology>( primitiveTopology ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
template<typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT* pSampleLocationsInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
@@ -62530,12 +81755,25 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> scissors, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const &scissors, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissors.size() , reinterpret_cast<const VkRect2D*>( scissors.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCountEXT( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pScissors, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissorCount, reinterpret_cast<const VkRect2D*>( pScissors ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCountEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const &scissors, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissors.size() , reinterpret_cast<const VkRect2D*>( scissors.data() ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
@@ -62552,6 +81790,20 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, VULKAN_HPP_NAMESPACE::StencilOp failOp, VULKAN_HPP_NAMESPACE::StencilOp passOp, VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, VULKAN_HPP_NAMESPACE::CompareOp compareOp, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdSetStencilOpEXT( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), static_cast<VkStencilOp>( failOp ), static_cast<VkStencilOp>( passOp ), static_cast<VkStencilOp>( depthFailOp ), static_cast<VkCompareOp>( compareOp ) );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, VULKAN_HPP_NAMESPACE::StencilOp failOp, VULKAN_HPP_NAMESPACE::StencilOp passOp, VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, VULKAN_HPP_NAMESPACE::CompareOp compareOp, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdSetStencilOpEXT( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), static_cast<VkStencilOp>( failOp ), static_cast<VkStencilOp>( passOp ), static_cast<VkStencilOp>( depthFailOp ), static_cast<VkCompareOp>( compareOp ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdSetStencilReference( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), reference );
@@ -62566,6 +81818,20 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdSetStencilTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( stencilTestEnable ) );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdSetStencilTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( stencilTestEnable ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
@@ -62585,7 +81851,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> viewports, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const &viewports, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewports.size() , reinterpret_cast<const VkViewport*>( viewports.data() ) );
}
@@ -62598,7 +81864,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> shadingRatePalettes, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const &shadingRatePalettes, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdSetViewportShadingRatePaletteNV( m_commandBuffer, firstViewport, shadingRatePalettes.size() , reinterpret_cast<const VkShadingRatePaletteNV*>( shadingRatePalettes.data() ) );
}
@@ -62611,12 +81877,55 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> viewportWScalings, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const &viewportWScalings, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportWScalings.size() , reinterpret_cast<const VkViewportWScalingNV*>( viewportWScalings.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport* pViewports, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdSetViewportWithCountEXT( m_commandBuffer, viewportCount, reinterpret_cast<const VkViewport*>( pViewports ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const &viewports, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdSetViewportWithCountEXT( m_commandBuffer, viewports.size() , reinterpret_cast<const VkViewport*>( viewports.data() ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
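setViewportWithCountEXT and setScissorWithCountEXT (the scissor variant appears earlier in this section) also come from VK_EXT_extended_dynamic_state and supply the viewport/scissor count at record time; in the enhanced overloads the count is taken from the ArrayProxy itself. A sketch assuming the pipeline declared the ..._WITH_COUNT_EXT dynamic states and the entry points are in the dispatcher:

#include <vulkan/vulkan.hpp>

// Full-extent viewport and scissor, with the counts supplied dynamically.
void setViewportState( vk::CommandBuffer cmd, vk::Extent2D extent,
                       vk::DispatchLoaderDynamic const & dl )
{
  vk::Viewport viewport( 0.0f, 0.0f,
                         static_cast<float>( extent.width ),
                         static_cast<float>( extent.height ),
                         0.0f, 1.0f );
  vk::Rect2D scissor( { 0, 0 }, extent );

  cmd.setViewportWithCountEXT( viewport, dl );  // a single element: count == 1
  cmd.setScissorWithCountEXT( scissor, dl );
}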
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pCallableShaderBindingTable, VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdTraceRaysIndirectKHR( m_commandBuffer, reinterpret_cast<const VkStridedBufferRegionKHR*>( pRaygenShaderBindingTable ), reinterpret_cast<const VkStridedBufferRegionKHR*>( pMissShaderBindingTable ), reinterpret_cast<const VkStridedBufferRegionKHR*>( pHitShaderBindingTable ), reinterpret_cast<const VkStridedBufferRegionKHR*>( pCallableShaderBindingTable ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const StridedBufferRegionKHR & raygenShaderBindingTable, const StridedBufferRegionKHR & missShaderBindingTable, const StridedBufferRegionKHR & hitShaderBindingTable, const StridedBufferRegionKHR & callableShaderBindingTable, VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdTraceRaysIndirectKHR( m_commandBuffer, reinterpret_cast<const VkStridedBufferRegionKHR*>( &raygenShaderBindingTable ), reinterpret_cast<const VkStridedBufferRegionKHR*>( &missShaderBindingTable ), reinterpret_cast<const VkStridedBufferRegionKHR*>( &hitShaderBindingTable ), reinterpret_cast<const VkStridedBufferRegionKHR*>( &callableShaderBindingTable ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pCallableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdTraceRaysKHR( m_commandBuffer, reinterpret_cast<const VkStridedBufferRegionKHR*>( pRaygenShaderBindingTable ), reinterpret_cast<const VkStridedBufferRegionKHR*>( pMissShaderBindingTable ), reinterpret_cast<const VkStridedBufferRegionKHR*>( pHitShaderBindingTable ), reinterpret_cast<const VkStridedBufferRegionKHR*>( pCallableShaderBindingTable ), width, height, depth );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const StridedBufferRegionKHR & raygenShaderBindingTable, const StridedBufferRegionKHR & missShaderBindingTable, const StridedBufferRegionKHR & hitShaderBindingTable, const StridedBufferRegionKHR & callableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdTraceRaysKHR( m_commandBuffer, reinterpret_cast<const VkStridedBufferRegionKHR*>( &raygenShaderBindingTable ), reinterpret_cast<const VkStridedBufferRegionKHR*>( &missShaderBindingTable ), reinterpret_cast<const VkStridedBufferRegionKHR*>( &hitShaderBindingTable ), reinterpret_cast<const VkStridedBufferRegionKHR*>( &callableShaderBindingTable ), width, height, depth );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
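traceRaysKHR and traceRaysIndirectKHR belong to the provisional VK_KHR_ray_tracing extension, hence the VK_ENABLE_BETA_EXTENSIONS guard around them. A rough sketch of a dispatch; the four StridedBufferRegionKHR regions are assumed to have been filled from the pipeline's shader-group handles elsewhere, and the macro must be defined before the header is included:

#define VK_ENABLE_BETA_EXTENSIONS  // provisional ray-tracing types sit behind this guard
#include <vulkan/vulkan.hpp>

// Launch a width x height ray-tracing dispatch (depth 1).
void dispatchRays( vk::CommandBuffer cmd,
                   vk::StridedBufferRegionKHR const & raygen,
                   vk::StridedBufferRegionKHR const & miss,
                   vk::StridedBufferRegionKHR const & hit,
                   vk::StridedBufferRegionKHR const & callable,
                   uint32_t width, uint32_t height,
                   vk::DispatchLoaderDynamic const & dl )
{
  cmd.traceRaysKHR( raygen, miss, hit, callable, width, height, 1, dl );
}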
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
@@ -62638,7 +81947,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename T, typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, ArrayProxy<const T> data, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, ArrayProxy<const T> const &data, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdUpdateBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), data.size() * sizeof( T ) , reinterpret_cast<const void*>( data.data() ) );
}
@@ -62651,22 +81960,37 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::waitEvents( ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> events, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> memoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> imageMemoryBarriers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::waitEvents( ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const &events, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const &memoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const &bufferMemoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const &imageMemoryBarriers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdWaitEvents( m_commandBuffer, events.size() , reinterpret_cast<const VkEvent*>( events.data() ), static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), memoryBarriers.size() , reinterpret_cast<const VkMemoryBarrier*>( memoryBarriers.data() ), bufferMemoryBarriers.size() , reinterpret_cast<const VkBufferMemoryBarrier*>( bufferMemoryBarriers.data() ), imageMemoryBarriers.size() , reinterpret_cast<const VkImageMemoryBarrier*>( imageMemoryBarriers.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer, accelerationStructureCount, reinterpret_cast<const VkAccelerationStructureNV*>( pAccelerationStructures ), static_cast<VkQueryType>( queryType ), static_cast<VkQueryPool>( queryPool ), firstQuery );
+ d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer, accelerationStructureCount, reinterpret_cast<const VkAccelerationStructureKHR*>( pAccelerationStructures ), static_cast<VkQueryType>( queryType ), static_cast<VkQueryPool>( queryPool ), firstQuery );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const &accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer, accelerationStructures.size() , reinterpret_cast<const VkAccelerationStructureNV*>( accelerationStructures.data() ), static_cast<VkQueryType>( queryType ), static_cast<VkQueryPool>( queryPool ), firstQuery );
+ d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer, accelerationStructures.size() , reinterpret_cast<const VkAccelerationStructureKHR*>( accelerationStructures.data() ), static_cast<VkQueryType>( queryType ), static_cast<VkQueryPool>( queryPool ), firstQuery );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer, accelerationStructureCount, reinterpret_cast<const VkAccelerationStructureKHR*>( pAccelerationStructures ), static_cast<VkQueryType>( queryType ), static_cast<VkQueryPool>( queryPool ), firstQuery );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const &accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer, accelerationStructures.size() , reinterpret_cast<const VkAccelerationStructureKHR*>( accelerationStructures.data() ), static_cast<VkQueryType>( queryType ), static_cast<VkQueryPool>( queryPool ), firstQuery );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -62700,13 +82024,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE Result CommandBuffer::end(Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::end(Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkEndCommandBuffer( m_commandBuffer ) );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::end(Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::end(Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkEndCommandBuffer( m_commandBuffer ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::end" );
@@ -62731,13 +82055,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_WIN32_KHR
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::acquireFullScreenExclusiveModeEXT" );
@@ -62746,13 +82070,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkAcquireNextImage2KHR( m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR*>( pAcquireInfo ), pImageIndex ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValue<uint32_t> Device::acquireNextImage2KHR( const AcquireNextImageInfoKHR & acquireInfo, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<uint32_t> Device::acquireNextImage2KHR( const AcquireNextImageInfoKHR & acquireInfo, Dispatch const &d ) const
{
uint32_t imageIndex;
Result result = static_cast<Result>( d.vkAcquireNextImage2KHR( m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR*>( &acquireInfo ), &imageIndex ) );
@@ -62761,13 +82085,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence, uint32_t* pImageIndex, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence, uint32_t* pImageIndex, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkAcquireNextImageKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), pImageIndex ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValue<uint32_t> Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<uint32_t> Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d ) const
{
uint32_t imageIndex;
Result result = static_cast<Result>( d.vkAcquireNextImageKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), &imageIndex ) );
@@ -62776,13 +82100,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
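acquireNextImageKHR, like the other Result-returning wrappers here, is now marked VULKAN_HPP_NODISCARD, so silently dropping the returned status becomes a compiler warning; the enhanced overload keeps returning a ResultValue<uint32_t> so that non-throwing codes such as eSuboptimalKHR can be inspected. A sketch of the intended call pattern; the helper name and the handling policy are illustrative:

#include <vulkan/vulkan.hpp>

// Acquire the next swapchain image and report whether the swapchain should
// be recreated soon (eSuboptimalKHR still delivers a usable image).
uint32_t acquireImage( vk::Device device, vk::SwapchainKHR swapchain,
                       vk::Semaphore imageAvailable, bool & needsRecreate )
{
  vk::ResultValue<uint32_t> acquired =
      device.acquireNextImageKHR( swapchain, UINT64_MAX, imageAvailable, nullptr );
  needsRecreate = ( acquired.result == vk::Result::eSuboptimalKHR );
  return acquired.value;
}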
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL* pConfiguration, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL* pConfiguration, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkAcquirePerformanceConfigurationINTEL( m_device, reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL*>( pAcquireInfo ), reinterpret_cast<VkPerformanceConfigurationINTEL*>( pConfiguration ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>::type Device::acquirePerformanceConfigurationINTEL( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>::type Device::acquirePerformanceConfigurationINTEL( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration;
Result result = static_cast<Result>( d.vkAcquirePerformanceConfigurationINTEL( m_device, reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL*>( &acquireInfo ), reinterpret_cast<VkPerformanceConfigurationINTEL*>( &configuration ) ) );
@@ -62791,13 +82115,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR*>( pInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::acquireProfilingLockKHR( const AcquireProfilingLockInfoKHR & info, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::acquireProfilingLockKHR( const AcquireProfilingLockInfoKHR & info, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR*>( &info ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::acquireProfilingLockKHR" );
@@ -62805,137 +82129,133 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo* pAllocateInfo, VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo* pAllocateInfo, VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo*>( pAllocateInfo ), reinterpret_cast<VkCommandBuffer*>( pCommandBuffers ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<CommandBuffer,Allocator>>::type Device::allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<CommandBuffer,Allocator>>::type Device::allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d ) const
{
std::vector<CommandBuffer,Allocator> commandBuffers( allocateInfo.commandBufferCount );
Result result = static_cast<Result>( d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkCommandBuffer*>( commandBuffers.data() ) ) );
return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateCommandBuffers" );
}
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<CommandBuffer,Allocator>>::type Device::allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, CommandBuffer>::value, int>::type>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<CommandBuffer,Allocator>>::type Device::allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<CommandBuffer,Allocator> commandBuffers( allocateInfo.commandBufferCount, vectorAllocator );
Result result = static_cast<Result>( d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkCommandBuffer*>( commandBuffers.data() ) ) );
return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateCommandBuffers" );
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<CommandBuffer,Dispatch>,Allocator>>::type Device::allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d ) const
- {
- static_assert( sizeof( CommandBuffer ) <= sizeof( UniqueHandle<CommandBuffer, Dispatch> ), "CommandBuffer is greater than UniqueHandle<CommandBuffer, Dispatch>!" );
- std::vector<UniqueHandle<CommandBuffer, Dispatch>, Allocator> commandBuffers;
- commandBuffers.reserve( allocateInfo.commandBufferCount );
- CommandBuffer* buffer = reinterpret_cast<CommandBuffer*>( reinterpret_cast<char*>( commandBuffers.data() ) + allocateInfo.commandBufferCount * ( sizeof( UniqueHandle<CommandBuffer, Dispatch> ) - sizeof( CommandBuffer ) ) );
- Result result = static_cast<Result>(d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkCommandBuffer*>( buffer ) ) );
+ template<typename Dispatch, typename Allocator >
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<CommandBuffer,Dispatch>,Allocator>>::type Device::allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d ) const
+ {
+ std::vector<UniqueHandle<CommandBuffer, Dispatch>, Allocator> uniqueCommandBuffers;
+ std::vector<CommandBuffer> commandBuffers( allocateInfo.commandBufferCount );
+ Result result = static_cast<Result>( d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkCommandBuffer*>(commandBuffers.data()) ) );
if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
+ uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount );
PoolFree<Device,CommandPool,Dispatch> deleter( *this, allocateInfo.commandPool, d );
for ( size_t i=0 ; i<allocateInfo.commandBufferCount ; i++ )
{
- commandBuffers.push_back( UniqueHandle<CommandBuffer, Dispatch>( buffer[i], deleter ) );
+ uniqueCommandBuffers.push_back( UniqueHandle<CommandBuffer, Dispatch>( commandBuffers[i], deleter ) );
}
}
- return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" );
+ return createResultValue( result, std::move( uniqueCommandBuffers ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" );
}
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<CommandBuffer,Dispatch>,Allocator>>::type Device::allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
+ template<typename Dispatch, typename Allocator , typename B, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<CommandBuffer, Dispatch>>::value, int>::type>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<CommandBuffer,Dispatch>,Allocator>>::type Device::allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
{
- static_assert( sizeof( CommandBuffer ) <= sizeof( UniqueHandle<CommandBuffer, Dispatch> ), "CommandBuffer is greater than UniqueHandle<CommandBuffer, Dispatch>!" );
- std::vector<UniqueHandle<CommandBuffer, Dispatch>, Allocator> commandBuffers( vectorAllocator );
- commandBuffers.reserve( allocateInfo.commandBufferCount );
- CommandBuffer* buffer = reinterpret_cast<CommandBuffer*>( reinterpret_cast<char*>( commandBuffers.data() ) + allocateInfo.commandBufferCount * ( sizeof( UniqueHandle<CommandBuffer, Dispatch> ) - sizeof( CommandBuffer ) ) );
- Result result = static_cast<Result>(d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkCommandBuffer*>( buffer ) ) );
+ std::vector<UniqueHandle<CommandBuffer, Dispatch>, Allocator> uniqueCommandBuffers( vectorAllocator );
+ std::vector<CommandBuffer> commandBuffers( allocateInfo.commandBufferCount );
+ Result result = static_cast<Result>( d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkCommandBuffer*>(commandBuffers.data()) ) );
if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
+ uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount );
PoolFree<Device,CommandPool,Dispatch> deleter( *this, allocateInfo.commandPool, d );
for ( size_t i=0 ; i<allocateInfo.commandBufferCount ; i++ )
{
- commandBuffers.push_back( UniqueHandle<CommandBuffer, Dispatch>( buffer[i], deleter ) );
+ uniqueCommandBuffers.push_back( UniqueHandle<CommandBuffer, Dispatch>( commandBuffers[i], deleter ) );
}
}
- return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" );
+ return createResultValue( result, std::move( uniqueCommandBuffers ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" );
}
#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
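allocateCommandBuffersUnique now fills a temporary std::vector<CommandBuffer> and then wraps each handle in a UniqueHandle with a PoolFree deleter, rather than constructing the handles in place over the unique-handle vector's storage. The calling convention is unchanged; with exceptions enabled the vector of unique handles comes back directly:

#include <vulkan/vulkan.hpp>

// Allocate primary command buffers as RAII handles; each buffer is freed
// back to 'pool' automatically when its UniqueCommandBuffer is destroyed.
std::vector<vk::UniqueCommandBuffer>
allocatePrimaries( vk::Device device, vk::CommandPool pool, uint32_t count )
{
  vk::CommandBufferAllocateInfo allocateInfo( pool, vk::CommandBufferLevel::ePrimary, count );
  return device.allocateCommandBuffersUnique( allocateInfo );
}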
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo* pAllocateInfo, VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo* pAllocateInfo, VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo*>( pAllocateInfo ), reinterpret_cast<VkDescriptorSet*>( pDescriptorSets ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<DescriptorSet,Allocator>>::type Device::allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<DescriptorSet,Allocator>>::type Device::allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const &d ) const
{
std::vector<DescriptorSet,Allocator> descriptorSets( allocateInfo.descriptorSetCount );
Result result = static_cast<Result>( d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkDescriptorSet*>( descriptorSets.data() ) ) );
return createResultValue( result, descriptorSets, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateDescriptorSets" );
}
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<DescriptorSet,Allocator>>::type Device::allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, DescriptorSet>::value, int>::type>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<DescriptorSet,Allocator>>::type Device::allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<DescriptorSet,Allocator> descriptorSets( allocateInfo.descriptorSetCount, vectorAllocator );
Result result = static_cast<Result>( d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkDescriptorSet*>( descriptorSets.data() ) ) );
return createResultValue( result, descriptorSets, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateDescriptorSets" );
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<DescriptorSet,Dispatch>,Allocator>>::type Device::allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const &d ) const
- {
- static_assert( sizeof( DescriptorSet ) <= sizeof( UniqueHandle<DescriptorSet, Dispatch> ), "DescriptorSet is greater than UniqueHandle<DescriptorSet, Dispatch>!" );
- std::vector<UniqueHandle<DescriptorSet, Dispatch>, Allocator> descriptorSets;
- descriptorSets.reserve( allocateInfo.descriptorSetCount );
- DescriptorSet* buffer = reinterpret_cast<DescriptorSet*>( reinterpret_cast<char*>( descriptorSets.data() ) + allocateInfo.descriptorSetCount * ( sizeof( UniqueHandle<DescriptorSet, Dispatch> ) - sizeof( DescriptorSet ) ) );
- Result result = static_cast<Result>(d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkDescriptorSet*>( buffer ) ) );
+ template<typename Dispatch, typename Allocator >
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<DescriptorSet,Dispatch>,Allocator>>::type Device::allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const &d ) const
+ {
+ std::vector<UniqueHandle<DescriptorSet, Dispatch>, Allocator> uniqueDescriptorSets;
+ std::vector<DescriptorSet> descriptorSets( allocateInfo.descriptorSetCount );
+ Result result = static_cast<Result>( d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkDescriptorSet*>(descriptorSets.data()) ) );
if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
+ uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount );
PoolFree<Device,DescriptorPool,Dispatch> deleter( *this, allocateInfo.descriptorPool, d );
for ( size_t i=0 ; i<allocateInfo.descriptorSetCount ; i++ )
{
- descriptorSets.push_back( UniqueHandle<DescriptorSet, Dispatch>( buffer[i], deleter ) );
+ uniqueDescriptorSets.push_back( UniqueHandle<DescriptorSet, Dispatch>( descriptorSets[i], deleter ) );
}
}
- return createResultValue( result, descriptorSets, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" );
+ return createResultValue( result, std::move( uniqueDescriptorSets ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" );
}
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<DescriptorSet,Dispatch>,Allocator>>::type Device::allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
+ template<typename Dispatch, typename Allocator , typename B, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<DescriptorSet, Dispatch>>::value, int>::type>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<DescriptorSet,Dispatch>,Allocator>>::type Device::allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
{
- static_assert( sizeof( DescriptorSet ) <= sizeof( UniqueHandle<DescriptorSet, Dispatch> ), "DescriptorSet is greater than UniqueHandle<DescriptorSet, Dispatch>!" );
- std::vector<UniqueHandle<DescriptorSet, Dispatch>, Allocator> descriptorSets( vectorAllocator );
- descriptorSets.reserve( allocateInfo.descriptorSetCount );
- DescriptorSet* buffer = reinterpret_cast<DescriptorSet*>( reinterpret_cast<char*>( descriptorSets.data() ) + allocateInfo.descriptorSetCount * ( sizeof( UniqueHandle<DescriptorSet, Dispatch> ) - sizeof( DescriptorSet ) ) );
- Result result = static_cast<Result>(d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkDescriptorSet*>( buffer ) ) );
+ std::vector<UniqueHandle<DescriptorSet, Dispatch>, Allocator> uniqueDescriptorSets( vectorAllocator );
+ std::vector<DescriptorSet> descriptorSets( allocateInfo.descriptorSetCount );
+ Result result = static_cast<Result>( d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkDescriptorSet*>(descriptorSets.data()) ) );
if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
+ uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount );
PoolFree<Device,DescriptorPool,Dispatch> deleter( *this, allocateInfo.descriptorPool, d );
for ( size_t i=0 ; i<allocateInfo.descriptorSetCount ; i++ )
{
- descriptorSets.push_back( UniqueHandle<DescriptorSet, Dispatch>( buffer[i], deleter ) );
+ uniqueDescriptorSets.push_back( UniqueHandle<DescriptorSet, Dispatch>( descriptorSets[i], deleter ) );
}
}
- return createResultValue( result, descriptorSets, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" );
+ return createResultValue( result, std::move( uniqueDescriptorSets ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" );
}
#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo* pAllocateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DeviceMemory* pMemory, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo* pAllocateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DeviceMemory* pMemory, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkAllocateMemory( m_device, reinterpret_cast<const VkMemoryAllocateInfo*>( pAllocateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDeviceMemory*>( pMemory ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceMemory>::type Device::allocateMemory( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceMemory>::type Device::allocateMemory( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::DeviceMemory memory;
Result result = static_cast<Result>( d.vkAllocateMemory( m_device, reinterpret_cast<const VkMemoryAllocateInfo*>( &allocateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDeviceMemory*>( &memory ) ) );
@@ -62943,7 +82263,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DeviceMemory,Dispatch>>::type Device::allocateMemoryUnique( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DeviceMemory,Dispatch>>::type Device::allocateMemoryUnique( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::DeviceMemory memory;
Result result = static_cast<Result>( d.vkAllocateMemory( m_device, reinterpret_cast<const VkMemoryAllocateInfo*>( &allocateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDeviceMemory*>( &memory ) ) );
@@ -62954,29 +82274,45 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template<typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindAccelerationStructureMemoryKHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoKHR* pBindInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ return static_cast<Result>( d.vkBindAccelerationStructureMemoryKHR( m_device, bindInfoCount, reinterpret_cast<const VkBindAccelerationStructureMemoryInfoKHR*>( pBindInfos ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindAccelerationStructureMemoryKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoKHR> const &bindInfos, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkBindAccelerationStructureMemoryKHR( m_device, bindInfos.size() , reinterpret_cast<const VkBindAccelerationStructureMemoryInfoKHR*>( bindInfos.data() ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindAccelerationStructureMemoryKHR" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::bindAccelerationStructureMemoryNV( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV* pBindInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindAccelerationStructureMemoryNV( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoKHR* pBindInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkBindAccelerationStructureMemoryNV( m_device, bindInfoCount, reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV*>( pBindInfos ) ) );
+ return static_cast<Result>( d.vkBindAccelerationStructureMemoryNV( m_device, bindInfoCount, reinterpret_cast<const VkBindAccelerationStructureMemoryInfoKHR*>( pBindInfos ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindAccelerationStructureMemoryNV( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV> bindInfos, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindAccelerationStructureMemoryNV( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoKHR> const &bindInfos, Dispatch const &d ) const
{
- Result result = static_cast<Result>( d.vkBindAccelerationStructureMemoryNV( m_device, bindInfos.size() , reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV*>( bindInfos.data() ) ) );
+ Result result = static_cast<Result>( d.vkBindAccelerationStructureMemoryNV( m_device, bindInfos.size() , reinterpret_cast<const VkBindAccelerationStructureMemoryInfoKHR*>( bindInfos.data() ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindAccelerationStructureMemoryNV" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindBufferMemory" );
@@ -62984,13 +82320,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::bindBufferMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo* pBindInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo* pBindInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkBindBufferMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo*>( pBindInfos ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindBufferMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> bindInfos, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindBufferMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const &bindInfos, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkBindBufferMemory2( m_device, bindInfos.size() , reinterpret_cast<const VkBindBufferMemoryInfo*>( bindInfos.data() ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindBufferMemory2" );
@@ -62998,13 +82334,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::bindBufferMemory2KHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo* pBindInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory2KHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo* pBindInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkBindBufferMemory2KHR( m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo*>( pBindInfos ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindBufferMemory2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> bindInfos, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindBufferMemory2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const &bindInfos, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkBindBufferMemory2KHR( m_device, bindInfos.size() , reinterpret_cast<const VkBindBufferMemoryInfo*>( bindInfos.data() ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindBufferMemory2KHR" );
@@ -63013,13 +82349,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindImageMemory" );
@@ -63027,13 +82363,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::bindImageMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo* pBindInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo* pBindInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkBindImageMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo*>( pBindInfos ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindImageMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> bindInfos, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindImageMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const &bindInfos, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkBindImageMemory2( m_device, bindInfos.size() , reinterpret_cast<const VkBindImageMemoryInfo*>( bindInfos.data() ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindImageMemory2" );
@@ -63041,34 +82377,134 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::bindImageMemory2KHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo* pBindInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory2KHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo* pBindInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkBindImageMemory2KHR( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo*>( pBindInfos ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindImageMemory2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> bindInfos, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindImageMemory2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const &bindInfos, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkBindImageMemory2KHR( m_device, bindInfos.size() , reinterpret_cast<const VkBindImageMemoryInfo*>( bindInfos.data() ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindImageMemory2KHR" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template<typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::buildAccelerationStructureKHR( uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfos, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildOffsetInfoKHR* const * ppOffsetInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ return static_cast<Result>( d.vkBuildAccelerationStructureKHR( m_device, infoCount, reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR*>( pInfos ), reinterpret_cast<const VkAccelerationStructureBuildOffsetInfoKHR* const*>( ppOffsetInfos ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::buildAccelerationStructureKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const &infos, ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildOffsetInfoKHR* const > const &pOffsetInfos, Dispatch const &d ) const
+ {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+ VULKAN_HPP_ASSERT( infos.size() == pOffsetInfos.size() );
+#else
+ if ( infos.size() != pOffsetInfos.size() )
+ {
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkDevice::buildAccelerationStructureKHR: infos.size() != pOffsetInfos.size()" );
+ }
+#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+ Result result = static_cast<Result>( d.vkBuildAccelerationStructureKHR( m_device, infos.size() , reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR*>( infos.data() ), reinterpret_cast<const VkAccelerationStructureBuildOffsetInfoKHR* const*>( pOffsetInfos.data() ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::buildAccelerationStructureKHR", { Result::eSuccess, Result::eOperationDeferredKHR, Result::eOperationNotDeferredKHR } );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCompileDeferredNV( m_device, static_cast<VkPipeline>( pipeline ), shader ) );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkCompileDeferredNV( m_device, static_cast<VkPipeline>( pipeline ), shader ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::compileDeferredNV" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template<typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ return static_cast<Result>( d.vkCopyAccelerationStructureKHR( m_device, reinterpret_cast<const VkCopyAccelerationStructureInfoKHR*>( pInfo ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyAccelerationStructureKHR( const CopyAccelerationStructureInfoKHR & info, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkCopyAccelerationStructureKHR( m_device, reinterpret_cast<const VkCopyAccelerationStructureInfoKHR*>( &info ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::copyAccelerationStructureKHR", { Result::eSuccess, Result::eOperationDeferredKHR, Result::eOperationNotDeferredKHR } );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template<typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ return static_cast<Result>( d.vkCopyAccelerationStructureToMemoryKHR( m_device, reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR*>( pInfo ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyAccelerationStructureToMemoryKHR( const CopyAccelerationStructureToMemoryInfoKHR & info, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkCopyAccelerationStructureToMemoryKHR( m_device, reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR*>( &info ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::copyAccelerationStructureToMemoryKHR", { Result::eSuccess, Result::eOperationDeferredKHR, Result::eOperationNotDeferredKHR } );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template<typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ return static_cast<Result>( d.vkCopyMemoryToAccelerationStructureKHR( m_device, reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR*>( pInfo ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToAccelerationStructureKHR( const CopyMemoryToAccelerationStructureInfoKHR & info, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkCopyMemoryToAccelerationStructureKHR( m_device, reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR*>( &info ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::copyMemoryToAccelerationStructureKHR", { Result::eSuccess, Result::eOperationDeferredKHR, Result::eOperationNotDeferredKHR } );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template<typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructure, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ return static_cast<Result>( d.vkCreateAccelerationStructureKHR( m_device, reinterpret_cast<const VkAccelerationStructureCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkAccelerationStructureKHR*>( pAccelerationStructure ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR>::type Device::createAccelerationStructureKHR( const AccelerationStructureCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure;
+ Result result = static_cast<Result>( d.vkCreateAccelerationStructureKHR( m_device, reinterpret_cast<const VkAccelerationStructureCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkAccelerationStructureKHR*>( &accelerationStructure ) ) );
+ return createResultValue( result, accelerationStructure, VULKAN_HPP_NAMESPACE_STRING"::Device::createAccelerationStructureKHR" );
+ }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<AccelerationStructureKHR,Dispatch>>::type Device::createAccelerationStructureKHRUnique( const AccelerationStructureCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure;
+ Result result = static_cast<Result>( d.vkCreateAccelerationStructureKHR( m_device, reinterpret_cast<const VkAccelerationStructureCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkAccelerationStructureKHR*>( &accelerationStructure ) ) );
+
+ ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+ return createResultValue<AccelerationStructureKHR,Dispatch>( result, accelerationStructure, VULKAN_HPP_NAMESPACE_STRING"::Device::createAccelerationStructureKHRUnique", deleter );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
template<typename Dispatch>
VULKAN_HPP_INLINE Result Device::createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructure, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
@@ -63096,13 +82532,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Buffer* pBuffer, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Buffer* pBuffer, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateBuffer( m_device, reinterpret_cast<const VkBufferCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkBuffer*>( pBuffer ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Buffer>::type Device::createBuffer( const BufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Buffer>::type Device::createBuffer( const BufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::Buffer buffer;
Result result = static_cast<Result>( d.vkCreateBuffer( m_device, reinterpret_cast<const VkBufferCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkBuffer*>( &buffer ) ) );
@@ -63110,7 +82546,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Buffer,Dispatch>>::type Device::createBufferUnique( const BufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Buffer,Dispatch>>::type Device::createBufferUnique( const BufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::Buffer buffer;
Result result = static_cast<Result>( d.vkCreateBuffer( m_device, reinterpret_cast<const VkBufferCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkBuffer*>( &buffer ) ) );
@@ -63122,13 +82558,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::BufferView* pView, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::BufferView* pView, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateBufferView( m_device, reinterpret_cast<const VkBufferViewCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkBufferView*>( pView ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferView>::type Device::createBufferView( const BufferViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferView>::type Device::createBufferView( const BufferViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::BufferView view;
Result result = static_cast<Result>( d.vkCreateBufferView( m_device, reinterpret_cast<const VkBufferViewCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkBufferView*>( &view ) ) );
@@ -63136,7 +82572,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<BufferView,Dispatch>>::type Device::createBufferViewUnique( const BufferViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<BufferView,Dispatch>>::type Device::createBufferViewUnique( const BufferViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::BufferView view;
Result result = static_cast<Result>( d.vkCreateBufferView( m_device, reinterpret_cast<const VkBufferViewCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkBufferView*>( &view ) ) );
@@ -63148,13 +82584,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::CommandPool* pCommandPool, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::CommandPool* pCommandPool, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateCommandPool( m_device, reinterpret_cast<const VkCommandPoolCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkCommandPool*>( pCommandPool ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::CommandPool>::type Device::createCommandPool( const CommandPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::CommandPool>::type Device::createCommandPool( const CommandPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::CommandPool commandPool;
Result result = static_cast<Result>( d.vkCreateCommandPool( m_device, reinterpret_cast<const VkCommandPoolCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkCommandPool*>( &commandPool ) ) );
@@ -63162,7 +82598,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<CommandPool,Dispatch>>::type Device::createCommandPoolUnique( const CommandPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<CommandPool,Dispatch>>::type Device::createCommandPoolUnique( const CommandPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::CommandPool commandPool;
Result result = static_cast<Result>( d.vkCreateCommandPool( m_device, reinterpret_cast<const VkCommandPoolCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkCommandPool*>( &commandPool ) ) );
@@ -63174,91 +82610,117 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfoCount, reinterpret_cast<const VkComputePipelineCreateInfo*>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkPipeline*>( pPipelines ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<Pipeline,Allocator>>::type Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline,Allocator>> Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const &createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
std::vector<Pipeline,Allocator> pipelines( createInfos.size() );
Result result = static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkComputePipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
- return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createComputePipelines" );
+ return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createComputePipelines", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } );
}
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<Pipeline,Allocator>>::type Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline,Allocator>> Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const &createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<Pipeline,Allocator> pipelines( createInfos.size(), vectorAllocator );
Result result = static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkComputePipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
- return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createComputePipelines" );
+ return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createComputePipelines", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } );
}
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<Pipeline>::type Device::createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<Pipeline> Device::createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
Pipeline pipeline;
Result result = static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1 , reinterpret_cast<const VkComputePipelineCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
- return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createComputePipeline" );
+ return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createComputePipeline", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } );
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
- {
- static_assert( sizeof( Pipeline ) <= sizeof( UniqueHandle<Pipeline, Dispatch> ), "Pipeline is greater than UniqueHandle<Pipeline, Dispatch>!" );
- std::vector<UniqueHandle<Pipeline, Dispatch>, Allocator> pipelines;
- pipelines.reserve( createInfos.size() );
- Pipeline* buffer = reinterpret_cast<Pipeline*>( reinterpret_cast<char*>( pipelines.data() ) + createInfos.size() * ( sizeof( UniqueHandle<Pipeline, Dispatch> ) - sizeof( Pipeline ) ) );
- Result result = static_cast<Result>(d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkComputePipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( buffer ) ) );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ template<typename Dispatch, typename Allocator >
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>> Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const &createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ std::vector<UniqueHandle<Pipeline, Dispatch>, Allocator> uniquePipelines;
+ std::vector<Pipeline> pipelines( createInfos.size() );
+ Result result = static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkComputePipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>(pipelines.data()) ) );
+ if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
{
+ uniquePipelines.reserve( createInfos.size() );
ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
for ( size_t i=0 ; i<createInfos.size() ; i++ )
{
- pipelines.push_back( UniqueHandle<Pipeline, Dispatch>( buffer[i], deleter ) );
+ uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
}
}
- return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique" );
+ return createResultValue( result, std::move( uniquePipelines ), VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
}
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
+ template<typename Dispatch, typename Allocator , typename B, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>> Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const &createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
{
- static_assert( sizeof( Pipeline ) <= sizeof( UniqueHandle<Pipeline, Dispatch> ), "Pipeline is greater than UniqueHandle<Pipeline, Dispatch>!" );
- std::vector<UniqueHandle<Pipeline, Dispatch>, Allocator> pipelines( vectorAllocator );
- pipelines.reserve( createInfos.size() );
- Pipeline* buffer = reinterpret_cast<Pipeline*>( reinterpret_cast<char*>( pipelines.data() ) + createInfos.size() * ( sizeof( UniqueHandle<Pipeline, Dispatch> ) - sizeof( Pipeline ) ) );
- Result result = static_cast<Result>(d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkComputePipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( buffer ) ) );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ std::vector<UniqueHandle<Pipeline, Dispatch>, Allocator> uniquePipelines( vectorAllocator );
+ std::vector<Pipeline> pipelines( createInfos.size() );
+ Result result = static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkComputePipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>(pipelines.data()) ) );
+ if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
{
+ uniquePipelines.reserve( createInfos.size() );
ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
for ( size_t i=0 ; i<createInfos.size() ; i++ )
{
- pipelines.push_back( UniqueHandle<Pipeline, Dispatch>( buffer[i], deleter ) );
+ uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
}
}
- return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique" );
+ return createResultValue( result, std::move( uniquePipelines ), VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
}
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Pipeline,Dispatch>>::type Device::createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline,Dispatch>> Device::createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
Pipeline pipeline;
Result result = static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1 , reinterpret_cast<const VkComputePipelineCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
- return createResultValue<Pipeline,Dispatch>( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createComputePipelineUnique", deleter );
+ return createResultValue<Pipeline,Dispatch>( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createComputePipelineUnique", { Result::eSuccess, Result::ePipelineCompileRequiredEXT }, deleter );
}
#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorPool* pDescriptorPool, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE Result Device::createDeferredOperationKHR( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DeferredOperationKHR* pDeferredOperation, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ return static_cast<Result>( d.vkCreateDeferredOperationKHR( m_device, reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDeferredOperationKHR*>( pDeferredOperation ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeferredOperationKHR>::type Device::createDeferredOperationKHR( Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation;
+ Result result = static_cast<Result>( d.vkCreateDeferredOperationKHR( m_device, reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDeferredOperationKHR*>( &deferredOperation ) ) );
+ return createResultValue( result, deferredOperation, VULKAN_HPP_NAMESPACE_STRING"::Device::createDeferredOperationKHR" );
+ }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DeferredOperationKHR,Dispatch>>::type Device::createDeferredOperationKHRUnique( Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation;
+ Result result = static_cast<Result>( d.vkCreateDeferredOperationKHR( m_device, reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDeferredOperationKHR*>( &deferredOperation ) ) );
+
+ ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+ return createResultValue<DeferredOperationKHR,Dispatch>( result, deferredOperation, VULKAN_HPP_NAMESPACE_STRING"::Device::createDeferredOperationKHRUnique", deleter );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorPool* pDescriptorPool, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateDescriptorPool( m_device, reinterpret_cast<const VkDescriptorPoolCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDescriptorPool*>( pDescriptorPool ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorPool>::type Device::createDescriptorPool( const DescriptorPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorPool>::type Device::createDescriptorPool( const DescriptorPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool;
Result result = static_cast<Result>( d.vkCreateDescriptorPool( m_device, reinterpret_cast<const VkDescriptorPoolCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorPool*>( &descriptorPool ) ) );
@@ -63266,7 +82728,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DescriptorPool,Dispatch>>::type Device::createDescriptorPoolUnique( const DescriptorPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DescriptorPool,Dispatch>>::type Device::createDescriptorPoolUnique( const DescriptorPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool;
Result result = static_cast<Result>( d.vkCreateDescriptorPool( m_device, reinterpret_cast<const VkDescriptorPoolCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorPool*>( &descriptorPool ) ) );
@@ -63278,13 +82740,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
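// Hedged call-site sketch for the nodiscard annotations introduced above: dropping
// the returned Result (or, with exceptions disabled, the returned ResultValue) now
// produces a compiler warning. Assumes a valid vk::Device `device`; the pool sizes
// and the helper name are placeholders.
#include <vulkan/vulkan.hpp>

vk::UniqueDescriptorPool makeDescriptorPool( vk::Device device )
{
  vk::DescriptorPoolSize poolSize( vk::DescriptorType::eUniformBuffer, 16 );
  vk::DescriptorPoolCreateInfo createInfo( vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet,
                                           8 /*maxSets*/, 1, &poolSize );
  // Enhanced mode: the returned unique handle must be used, not silently discarded.
  return device.createDescriptorPoolUnique( createInfo );
}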
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDescriptorSetLayout*>( pSetLayout ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>::type Device::createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>::type Device::createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout;
Result result = static_cast<Result>( d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorSetLayout*>( &setLayout ) ) );
@@ -63292,7 +82754,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DescriptorSetLayout,Dispatch>>::type Device::createDescriptorSetLayoutUnique( const DescriptorSetLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DescriptorSetLayout,Dispatch>>::type Device::createDescriptorSetLayoutUnique( const DescriptorSetLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout;
Result result = static_cast<Result>( d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorSetLayout*>( &setLayout ) ) );
@@ -63304,13 +82766,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDescriptorUpdateTemplate*>( pDescriptorUpdateTemplate ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type Device::createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type Device::createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorUpdateTemplate*>( &descriptorUpdateTemplate ) ) );
@@ -63318,7 +82780,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DescriptorUpdateTemplate,Dispatch>>::type Device::createDescriptorUpdateTemplateUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DescriptorUpdateTemplate,Dispatch>>::type Device::createDescriptorUpdateTemplateUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorUpdateTemplate*>( &descriptorUpdateTemplate ) ) );
@@ -63330,13 +82792,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDescriptorUpdateTemplate*>( pDescriptorUpdateTemplate ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type Device::createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type Device::createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorUpdateTemplate*>( &descriptorUpdateTemplate ) ) );
@@ -63344,7 +82806,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DescriptorUpdateTemplate,Dispatch>>::type Device::createDescriptorUpdateTemplateKHRUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DescriptorUpdateTemplate,Dispatch>>::type Device::createDescriptorUpdateTemplateKHRUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorUpdateTemplate*>( &descriptorUpdateTemplate ) ) );
@@ -63356,13 +82818,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Event* pEvent, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Event* pEvent, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateEvent( m_device, reinterpret_cast<const VkEventCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkEvent*>( pEvent ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Event>::type Device::createEvent( const EventCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Event>::type Device::createEvent( const EventCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::Event event;
Result result = static_cast<Result>( d.vkCreateEvent( m_device, reinterpret_cast<const VkEventCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkEvent*>( &event ) ) );
@@ -63370,7 +82832,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Event,Dispatch>>::type Device::createEventUnique( const EventCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Event,Dispatch>>::type Device::createEventUnique( const EventCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::Event event;
Result result = static_cast<Result>( d.vkCreateEvent( m_device, reinterpret_cast<const VkEventCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkEvent*>( &event ) ) );
@@ -63382,13 +82844,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateFence( m_device, reinterpret_cast<const VkFenceCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkFence*>( pFence ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type Device::createFence( const FenceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type Device::createFence( const FenceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::Fence fence;
Result result = static_cast<Result>( d.vkCreateFence( m_device, reinterpret_cast<const VkFenceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkFence*>( &fence ) ) );
@@ -63396,7 +82858,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Fence,Dispatch>>::type Device::createFenceUnique( const FenceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Fence,Dispatch>>::type Device::createFenceUnique( const FenceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::Fence fence;
Result result = static_cast<Result>( d.vkCreateFence( m_device, reinterpret_cast<const VkFenceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkFence*>( &fence ) ) );
@@ -63408,13 +82870,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Framebuffer* pFramebuffer, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Framebuffer* pFramebuffer, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateFramebuffer( m_device, reinterpret_cast<const VkFramebufferCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkFramebuffer*>( pFramebuffer ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Framebuffer>::type Device::createFramebuffer( const FramebufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Framebuffer>::type Device::createFramebuffer( const FramebufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::Framebuffer framebuffer;
Result result = static_cast<Result>( d.vkCreateFramebuffer( m_device, reinterpret_cast<const VkFramebufferCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkFramebuffer*>( &framebuffer ) ) );
@@ -63422,7 +82884,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Framebuffer,Dispatch>>::type Device::createFramebufferUnique( const FramebufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Framebuffer,Dispatch>>::type Device::createFramebufferUnique( const FramebufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::Framebuffer framebuffer;
Result result = static_cast<Result>( d.vkCreateFramebuffer( m_device, reinterpret_cast<const VkFramebufferCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkFramebuffer*>( &framebuffer ) ) );
@@ -63434,91 +82896,89 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfoCount, reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkPipeline*>( pPipelines ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<Pipeline,Allocator>>::type Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline,Allocator>> Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const &createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
std::vector<Pipeline,Allocator> pipelines( createInfos.size() );
Result result = static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
- return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipelines" );
+ return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipelines", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } );
}
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<Pipeline,Allocator>>::type Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline,Allocator>> Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const &createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<Pipeline,Allocator> pipelines( createInfos.size(), vectorAllocator );
Result result = static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
- return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipelines" );
+ return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipelines", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } );
}
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<Pipeline>::type Device::createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<Pipeline> Device::createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
Pipeline pipeline;
Result result = static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1 , reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
- return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipeline" );
+ return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipeline", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } );
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
- {
- static_assert( sizeof( Pipeline ) <= sizeof( UniqueHandle<Pipeline, Dispatch> ), "Pipeline is greater than UniqueHandle<Pipeline, Dispatch>!" );
- std::vector<UniqueHandle<Pipeline, Dispatch>, Allocator> pipelines;
- pipelines.reserve( createInfos.size() );
- Pipeline* buffer = reinterpret_cast<Pipeline*>( reinterpret_cast<char*>( pipelines.data() ) + createInfos.size() * ( sizeof( UniqueHandle<Pipeline, Dispatch> ) - sizeof( Pipeline ) ) );
- Result result = static_cast<Result>(d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( buffer ) ) );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ template<typename Dispatch, typename Allocator >
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>> Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const &createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ std::vector<UniqueHandle<Pipeline, Dispatch>, Allocator> uniquePipelines;
+ std::vector<Pipeline> pipelines( createInfos.size() );
+ Result result = static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>(pipelines.data()) ) );
+ if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
{
+ uniquePipelines.reserve( createInfos.size() );
ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
for ( size_t i=0 ; i<createInfos.size() ; i++ )
{
- pipelines.push_back( UniqueHandle<Pipeline, Dispatch>( buffer[i], deleter ) );
+ uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
}
}
- return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique" );
+ return createResultValue( result, std::move( uniquePipelines ), VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
}
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
+ template<typename Dispatch, typename Allocator , typename B, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>> Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const &createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
{
- static_assert( sizeof( Pipeline ) <= sizeof( UniqueHandle<Pipeline, Dispatch> ), "Pipeline is greater than UniqueHandle<Pipeline, Dispatch>!" );
- std::vector<UniqueHandle<Pipeline, Dispatch>, Allocator> pipelines( vectorAllocator );
- pipelines.reserve( createInfos.size() );
- Pipeline* buffer = reinterpret_cast<Pipeline*>( reinterpret_cast<char*>( pipelines.data() ) + createInfos.size() * ( sizeof( UniqueHandle<Pipeline, Dispatch> ) - sizeof( Pipeline ) ) );
- Result result = static_cast<Result>(d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( buffer ) ) );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ std::vector<UniqueHandle<Pipeline, Dispatch>, Allocator> uniquePipelines( vectorAllocator );
+ std::vector<Pipeline> pipelines( createInfos.size() );
+ Result result = static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>(pipelines.data()) ) );
+ if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
{
+ uniquePipelines.reserve( createInfos.size() );
ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
for ( size_t i=0 ; i<createInfos.size() ; i++ )
{
- pipelines.push_back( UniqueHandle<Pipeline, Dispatch>( buffer[i], deleter ) );
+ uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
}
}
- return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique" );
+ return createResultValue( result, std::move( uniquePipelines ), VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
}
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Pipeline,Dispatch>>::type Device::createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline,Dispatch>> Device::createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
Pipeline pipeline;
Result result = static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1 , reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
- return createResultValue<Pipeline,Dispatch>( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipelineUnique", deleter );
+ return createResultValue<Pipeline,Dispatch>( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipelineUnique", { Result::eSuccess, Result::ePipelineCompileRequiredEXT }, deleter );
}
#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
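// Hedged call-site sketch for the changed pipeline-creation return type above: these
// overloads now return ResultValue instead of throwing on every non-success code,
// because Result::ePipelineCompileRequiredEXT is accepted as a success code alongside
// eSuccess. Assumes a valid device, cache, and a fully populated create-info; the
// helper name is a placeholder.
#include <vulkan/vulkan.hpp>

vk::Pipeline buildGraphicsPipeline( vk::Device device, vk::PipelineCache cache,
                                    const vk::GraphicsPipelineCreateInfo & createInfo )
{
  vk::ResultValue<vk::Pipeline> rv = device.createGraphicsPipeline( cache, createInfo );
  if ( rv.result == vk::Result::ePipelineCompileRequiredEXT )
  {
    // The driver reported that a full compile is required (see
    // VK_EXT_pipeline_creation_cache_control); handle or retry here.
  }
  return rv.value;
}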
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Image* pImage, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Image* pImage, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateImage( m_device, reinterpret_cast<const VkImageCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkImage*>( pImage ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Image>::type Device::createImage( const ImageCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Image>::type Device::createImage( const ImageCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::Image image;
Result result = static_cast<Result>( d.vkCreateImage( m_device, reinterpret_cast<const VkImageCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkImage*>( &image ) ) );
@@ -63526,7 +82986,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Image,Dispatch>>::type Device::createImageUnique( const ImageCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Image,Dispatch>>::type Device::createImageUnique( const ImageCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::Image image;
Result result = static_cast<Result>( d.vkCreateImage( m_device, reinterpret_cast<const VkImageCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkImage*>( &image ) ) );
@@ -63538,13 +82998,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ImageView* pView, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ImageView* pView, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateImageView( m_device, reinterpret_cast<const VkImageViewCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkImageView*>( pView ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageView>::type Device::createImageView( const ImageViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageView>::type Device::createImageView( const ImageViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::ImageView view;
Result result = static_cast<Result>( d.vkCreateImageView( m_device, reinterpret_cast<const VkImageViewCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkImageView*>( &view ) ) );
@@ -63552,7 +83012,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<ImageView,Dispatch>>::type Device::createImageViewUnique( const ImageViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<ImageView,Dispatch>>::type Device::createImageViewUnique( const ImageViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::ImageView view;
Result result = static_cast<Result>( d.vkCreateImageView( m_device, reinterpret_cast<const VkImageViewCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkImageView*>( &view ) ) );
@@ -63564,117 +83024,117 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createIndirectCommandsLayoutNVX( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX* pIndirectCommandsLayout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV* pIndirectCommandsLayout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkCreateIndirectCommandsLayoutNVX( m_device, reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNVX*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkIndirectCommandsLayoutNVX*>( pIndirectCommandsLayout ) ) );
+ return static_cast<Result>( d.vkCreateIndirectCommandsLayoutNV( m_device, reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkIndirectCommandsLayoutNV*>( pIndirectCommandsLayout ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX>::type Device::createIndirectCommandsLayoutNVX( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV>::type Device::createIndirectCommandsLayoutNV( const IndirectCommandsLayoutCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout;
- Result result = static_cast<Result>( d.vkCreateIndirectCommandsLayoutNVX( m_device, reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNVX*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkIndirectCommandsLayoutNVX*>( &indirectCommandsLayout ) ) );
- return createResultValue( result, indirectCommandsLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createIndirectCommandsLayoutNVX" );
+ VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout;
+ Result result = static_cast<Result>( d.vkCreateIndirectCommandsLayoutNV( m_device, reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkIndirectCommandsLayoutNV*>( &indirectCommandsLayout ) ) );
+ return createResultValue( result, indirectCommandsLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createIndirectCommandsLayoutNV" );
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<IndirectCommandsLayoutNVX,Dispatch>>::type Device::createIndirectCommandsLayoutNVXUnique( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<IndirectCommandsLayoutNV,Dispatch>>::type Device::createIndirectCommandsLayoutNVUnique( const IndirectCommandsLayoutCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout;
- Result result = static_cast<Result>( d.vkCreateIndirectCommandsLayoutNVX( m_device, reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNVX*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkIndirectCommandsLayoutNVX*>( &indirectCommandsLayout ) ) );
+ VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout;
+ Result result = static_cast<Result>( d.vkCreateIndirectCommandsLayoutNV( m_device, reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkIndirectCommandsLayoutNV*>( &indirectCommandsLayout ) ) );
ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
- return createResultValue<IndirectCommandsLayoutNVX,Dispatch>( result, indirectCommandsLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createIndirectCommandsLayoutNVXUnique", deleter );
+ return createResultValue<IndirectCommandsLayoutNV,Dispatch>( result, indirectCommandsLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createIndirectCommandsLayoutNVUnique", deleter );
}
#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
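// Hedged sketch for the rename above: the experimental NVX device-generated-commands
// wrappers are replaced by their VK_NV_device_generated_commands equivalents. Assumes
// `createInfo` has already been filled in for that extension; the helper name is a
// placeholder.
#include <vulkan/vulkan.hpp>

vk::IndirectCommandsLayoutNV
makeIndirectCommandsLayout( vk::Device device,
                            const vk::IndirectCommandsLayoutCreateInfoNV & createInfo )
{
  // Enhanced mode: throws on failure and returns the layout handle; the caller is
  // responsible for destroying it later.
  return device.createIndirectCommandsLayoutNV( createInfo );
}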
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createObjectTableNVX( const VULKAN_HPP_NAMESPACE::ObjectTableCreateInfoNVX* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ObjectTableNVX* pObjectTable, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PipelineCache* pPipelineCache, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkCreateObjectTableNVX( m_device, reinterpret_cast<const VkObjectTableCreateInfoNVX*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkObjectTableNVX*>( pObjectTable ) ) );
+ return static_cast<Result>( d.vkCreatePipelineCache( m_device, reinterpret_cast<const VkPipelineCacheCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkPipelineCache*>( pPipelineCache ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ObjectTableNVX>::type Device::createObjectTableNVX( const ObjectTableCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineCache>::type Device::createPipelineCache( const PipelineCacheCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable;
- Result result = static_cast<Result>( d.vkCreateObjectTableNVX( m_device, reinterpret_cast<const VkObjectTableCreateInfoNVX*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkObjectTableNVX*>( &objectTable ) ) );
- return createResultValue( result, objectTable, VULKAN_HPP_NAMESPACE_STRING"::Device::createObjectTableNVX" );
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache;
+ Result result = static_cast<Result>( d.vkCreatePipelineCache( m_device, reinterpret_cast<const VkPipelineCacheCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipelineCache*>( &pipelineCache ) ) );
+ return createResultValue( result, pipelineCache, VULKAN_HPP_NAMESPACE_STRING"::Device::createPipelineCache" );
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<ObjectTableNVX,Dispatch>>::type Device::createObjectTableNVXUnique( const ObjectTableCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<PipelineCache,Dispatch>>::type Device::createPipelineCacheUnique( const PipelineCacheCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable;
- Result result = static_cast<Result>( d.vkCreateObjectTableNVX( m_device, reinterpret_cast<const VkObjectTableCreateInfoNVX*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkObjectTableNVX*>( &objectTable ) ) );
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache;
+ Result result = static_cast<Result>( d.vkCreatePipelineCache( m_device, reinterpret_cast<const VkPipelineCacheCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipelineCache*>( &pipelineCache ) ) );
ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
- return createResultValue<ObjectTableNVX,Dispatch>( result, objectTable, VULKAN_HPP_NAMESPACE_STRING"::Device::createObjectTableNVXUnique", deleter );
+ return createResultValue<PipelineCache,Dispatch>( result, pipelineCache, VULKAN_HPP_NAMESPACE_STRING"::Device::createPipelineCacheUnique", deleter );
}
#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PipelineCache* pPipelineCache, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PipelineLayout* pPipelineLayout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkCreatePipelineCache( m_device, reinterpret_cast<const VkPipelineCacheCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkPipelineCache*>( pPipelineCache ) ) );
+ return static_cast<Result>( d.vkCreatePipelineLayout( m_device, reinterpret_cast<const VkPipelineLayoutCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkPipelineLayout*>( pPipelineLayout ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineCache>::type Device::createPipelineCache( const PipelineCacheCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineLayout>::type Device::createPipelineLayout( const PipelineLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache;
- Result result = static_cast<Result>( d.vkCreatePipelineCache( m_device, reinterpret_cast<const VkPipelineCacheCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipelineCache*>( &pipelineCache ) ) );
- return createResultValue( result, pipelineCache, VULKAN_HPP_NAMESPACE_STRING"::Device::createPipelineCache" );
+ VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout;
+ Result result = static_cast<Result>( d.vkCreatePipelineLayout( m_device, reinterpret_cast<const VkPipelineLayoutCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipelineLayout*>( &pipelineLayout ) ) );
+ return createResultValue( result, pipelineLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createPipelineLayout" );
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<PipelineCache,Dispatch>>::type Device::createPipelineCacheUnique( const PipelineCacheCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<PipelineLayout,Dispatch>>::type Device::createPipelineLayoutUnique( const PipelineLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache;
- Result result = static_cast<Result>( d.vkCreatePipelineCache( m_device, reinterpret_cast<const VkPipelineCacheCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipelineCache*>( &pipelineCache ) ) );
+ VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout;
+ Result result = static_cast<Result>( d.vkCreatePipelineLayout( m_device, reinterpret_cast<const VkPipelineLayoutCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipelineLayout*>( &pipelineLayout ) ) );
ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
- return createResultValue<PipelineCache,Dispatch>( result, pipelineCache, VULKAN_HPP_NAMESPACE_STRING"::Device::createPipelineCacheUnique", deleter );
+ return createResultValue<PipelineLayout,Dispatch>( result, pipelineLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createPipelineLayoutUnique", deleter );
}
#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PipelineLayout* pPipelineLayout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE Result Device::createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT* pPrivateDataSlot, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkCreatePipelineLayout( m_device, reinterpret_cast<const VkPipelineLayoutCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkPipelineLayout*>( pPipelineLayout ) ) );
+ return static_cast<Result>( d.vkCreatePrivateDataSlotEXT( m_device, reinterpret_cast<const VkPrivateDataSlotCreateInfoEXT*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkPrivateDataSlotEXT*>( pPrivateDataSlot ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineLayout>::type Device::createPipelineLayout( const PipelineLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT>::type Device::createPrivateDataSlotEXT( const PrivateDataSlotCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout;
- Result result = static_cast<Result>( d.vkCreatePipelineLayout( m_device, reinterpret_cast<const VkPipelineLayoutCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipelineLayout*>( &pipelineLayout ) ) );
- return createResultValue( result, pipelineLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createPipelineLayout" );
+ VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot;
+ Result result = static_cast<Result>( d.vkCreatePrivateDataSlotEXT( m_device, reinterpret_cast<const VkPrivateDataSlotCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPrivateDataSlotEXT*>( &privateDataSlot ) ) );
+ return createResultValue( result, privateDataSlot, VULKAN_HPP_NAMESPACE_STRING"::Device::createPrivateDataSlotEXT" );
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<PipelineLayout,Dispatch>>::type Device::createPipelineLayoutUnique( const PipelineLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<PrivateDataSlotEXT,Dispatch>>::type Device::createPrivateDataSlotEXTUnique( const PrivateDataSlotCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout;
- Result result = static_cast<Result>( d.vkCreatePipelineLayout( m_device, reinterpret_cast<const VkPipelineLayoutCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipelineLayout*>( &pipelineLayout ) ) );
+ VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot;
+ Result result = static_cast<Result>( d.vkCreatePrivateDataSlotEXT( m_device, reinterpret_cast<const VkPrivateDataSlotCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPrivateDataSlotEXT*>( &privateDataSlot ) ) );
ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
- return createResultValue<PipelineLayout,Dispatch>( result, pipelineLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createPipelineLayoutUnique", deleter );
+ return createResultValue<PrivateDataSlotEXT,Dispatch>( result, privateDataSlot, VULKAN_HPP_NAMESPACE_STRING"::Device::createPrivateDataSlotEXTUnique", deleter );
}
#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
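// Hedged sketch for the new VK_EXT_private_data creator above. Assumes the extension
// is enabled on `device` and smart handles are available; default (zero) creation
// flags are used and the helper name is a placeholder.
#include <vulkan/vulkan.hpp>

auto makePrivateDataSlot( vk::Device device )
{
  vk::PrivateDataSlotCreateInfoEXT createInfo{};
  // Throws on failure; the slot is destroyed when the unique handle goes out of scope.
  return device.createPrivateDataSlotEXTUnique( createInfo );
}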
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::QueryPool* pQueryPool, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::QueryPool* pQueryPool, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateQueryPool( m_device, reinterpret_cast<const VkQueryPoolCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkQueryPool*>( pQueryPool ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::QueryPool>::type Device::createQueryPool( const QueryPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::QueryPool>::type Device::createQueryPool( const QueryPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::QueryPool queryPool;
Result result = static_cast<Result>( d.vkCreateQueryPool( m_device, reinterpret_cast<const VkQueryPoolCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkQueryPool*>( &queryPool ) ) );
@@ -63682,7 +83142,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<QueryPool,Dispatch>>::type Device::createQueryPoolUnique( const QueryPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<QueryPool,Dispatch>>::type Device::createQueryPoolUnique( const QueryPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::QueryPool queryPool;
Result result = static_cast<Result>( d.vkCreateQueryPool( m_device, reinterpret_cast<const VkQueryPoolCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkQueryPool*>( &queryPool ) ) );
@@ -63693,92 +83153,168 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template<typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ return static_cast<Result>( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfoCount, reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR*>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkPipeline*>( pPipelines ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator , typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline,Allocator>> Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const &createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ std::vector<Pipeline,Allocator> pipelines( createInfos.size() );
+ Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
+ return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelinesKHR", { Result::eSuccess, Result::eOperationDeferredKHR, Result::eOperationNotDeferredKHR, Result::ePipelineCompileRequiredEXT } );
+ }
+ template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline,Allocator>> Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const &createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
+ {
+ std::vector<Pipeline,Allocator> pipelines( createInfos.size(), vectorAllocator );
+ Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
+ return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelinesKHR", { Result::eSuccess, Result::eOperationDeferredKHR, Result::eOperationNotDeferredKHR, Result::ePipelineCompileRequiredEXT } );
+ }
+ template<typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<Pipeline> Device::createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ Pipeline pipeline;
+ Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1 , reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
+ return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelineKHR", { Result::eSuccess, Result::eOperationDeferredKHR, Result::eOperationNotDeferredKHR, Result::ePipelineCompileRequiredEXT } );
+ }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch, typename Allocator >
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>> Device::createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const &createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ std::vector<UniqueHandle<Pipeline, Dispatch>, Allocator> uniquePipelines;
+ std::vector<Pipeline> pipelines( createInfos.size() );
+ Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>(pipelines.data()) ) );
+ if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) || ( result == VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+ {
+ uniquePipelines.reserve( createInfos.size() );
+ ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+ for ( size_t i=0 ; i<createInfos.size() ; i++ )
+ {
+ uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
+ }
+ }
+
+ return createResultValue( result, std::move( uniquePipelines ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+ }
+ template<typename Dispatch, typename Allocator , typename B, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>> Device::createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const &createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
+ {
+ std::vector<UniqueHandle<Pipeline, Dispatch>, Allocator> uniquePipelines( vectorAllocator );
+ std::vector<Pipeline> pipelines( createInfos.size() );
+ Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>(pipelines.data()) ) );
+ if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) || ( result == VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+ {
+ uniquePipelines.reserve( createInfos.size() );
+ ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+ for ( size_t i=0 ; i<createInfos.size() ; i++ )
+ {
+ uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
+ }
+ }
+
+ return createResultValue( result, std::move( uniquePipelines ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+ }
+ template<typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline,Dispatch>> Device::createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ Pipeline pipeline;
+ Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1 , reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
+
+ ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+ return createResultValue<Pipeline,Dispatch>( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelineKHRUnique", { Result::eSuccess, Result::eOperationDeferredKHR, Result::eOperationNotDeferredKHR, Result::ePipelineCompileRequiredEXT }, deleter );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
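// Editor's note (not part of this diff): the KHR ray-tracing wrappers above are only
// compiled when the provisional beta API is opted into, e.g.
//   #define VK_ENABLE_BETA_EXTENSIONS
//   #include <vulkan/vulkan.hpp>
// Without that define, only the NV ray-tracing entry points below are available.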
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfoCount, reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkPipeline*>( pPipelines ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<Pipeline,Allocator>>::type Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline,Allocator>> Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const &createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
std::vector<Pipeline,Allocator> pipelines( createInfos.size() );
Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
- return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelinesNV" );
+ return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelinesNV", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } );
}
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<Pipeline,Allocator>>::type Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline,Allocator>> Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const &createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<Pipeline,Allocator> pipelines( createInfos.size(), vectorAllocator );
Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
- return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelinesNV" );
+ return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelinesNV", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } );
}
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<Pipeline>::type Device::createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<Pipeline> Device::createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
Pipeline pipeline;
Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1 , reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
- return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelineNV" );
+ return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelineNV", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } );
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type Device::createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
- {
- static_assert( sizeof( Pipeline ) <= sizeof( UniqueHandle<Pipeline, Dispatch> ), "Pipeline is greater than UniqueHandle<Pipeline, Dispatch>!" );
- std::vector<UniqueHandle<Pipeline, Dispatch>, Allocator> pipelines;
- pipelines.reserve( createInfos.size() );
- Pipeline* buffer = reinterpret_cast<Pipeline*>( reinterpret_cast<char*>( pipelines.data() ) + createInfos.size() * ( sizeof( UniqueHandle<Pipeline, Dispatch> ) - sizeof( Pipeline ) ) );
- Result result = static_cast<Result>(d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( buffer ) ) );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ template<typename Dispatch, typename Allocator >
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>> Device::createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const &createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ std::vector<UniqueHandle<Pipeline, Dispatch>, Allocator> uniquePipelines;
+ std::vector<Pipeline> pipelines( createInfos.size() );
+ Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>(pipelines.data()) ) );
+ if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
{
+ uniquePipelines.reserve( createInfos.size() );
ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
for ( size_t i=0 ; i<createInfos.size() ; i++ )
{
- pipelines.push_back( UniqueHandle<Pipeline, Dispatch>( buffer[i], deleter ) );
+ uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
}
}
- return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique" );
+ return createResultValue( result, std::move( uniquePipelines ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
}
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type Device::createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
+ template<typename Dispatch, typename Allocator , typename B, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>> Device::createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const &createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
{
- static_assert( sizeof( Pipeline ) <= sizeof( UniqueHandle<Pipeline, Dispatch> ), "Pipeline is greater than UniqueHandle<Pipeline, Dispatch>!" );
- std::vector<UniqueHandle<Pipeline, Dispatch>, Allocator> pipelines( vectorAllocator );
- pipelines.reserve( createInfos.size() );
- Pipeline* buffer = reinterpret_cast<Pipeline*>( reinterpret_cast<char*>( pipelines.data() ) + createInfos.size() * ( sizeof( UniqueHandle<Pipeline, Dispatch> ) - sizeof( Pipeline ) ) );
- Result result = static_cast<Result>(d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( buffer ) ) );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ std::vector<UniqueHandle<Pipeline, Dispatch>, Allocator> uniquePipelines( vectorAllocator );
+ std::vector<Pipeline> pipelines( createInfos.size() );
+ Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>(pipelines.data()) ) );
+ if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
{
+ uniquePipelines.reserve( createInfos.size() );
ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
for ( size_t i=0 ; i<createInfos.size() ; i++ )
{
- pipelines.push_back( UniqueHandle<Pipeline, Dispatch>( buffer[i], deleter ) );
+ uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
}
}
- return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique" );
+ return createResultValue( result, std::move( uniquePipelines ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
}
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Pipeline,Dispatch>>::type Device::createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline,Dispatch>> Device::createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
Pipeline pipeline;
Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1 , reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
- return createResultValue<Pipeline,Dispatch>( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelineNVUnique", deleter );
+ return createResultValue<Pipeline,Dispatch>( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelineNVUnique", { Result::eSuccess, Result::ePipelineCompileRequiredEXT }, deleter );
}
#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
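// Illustrative usage sketch (editor's addition, not part of this diff): the ray-tracing
// pipeline wrappers now return vk::ResultValue instead of ResultValueType, because
// VK_PIPELINE_COMPILE_REQUIRED_EXT (and, for KHR, the deferred-operation codes) count as
// success, so the caller receives the vk::Result alongside the value even in
// exceptions-enabled builds. Assuming a vk::Device `device`, a vk::PipelineCache `cache`,
// and a filled vk::RayTracingPipelineCreateInfoNV `rtCreateInfo`:
auto rv = device.createRayTracingPipelineNV( cache, rtCreateInfo );  // vk::ResultValue<vk::Pipeline>
if ( rv.result == vk::Result::eSuccess )
{
  vk::Pipeline pipeline = rv.value;
  // ... use pipeline; destroy it later with device.destroyPipeline( pipeline )
}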
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateRenderPass( m_device, reinterpret_cast<const VkRenderPassCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkRenderPass*>( pRenderPass ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type Device::createRenderPass( const RenderPassCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type Device::createRenderPass( const RenderPassCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::RenderPass renderPass;
Result result = static_cast<Result>( d.vkCreateRenderPass( m_device, reinterpret_cast<const VkRenderPassCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkRenderPass*>( &renderPass ) ) );
@@ -63786,7 +83322,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<RenderPass,Dispatch>>::type Device::createRenderPassUnique( const RenderPassCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<RenderPass,Dispatch>>::type Device::createRenderPassUnique( const RenderPassCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::RenderPass renderPass;
Result result = static_cast<Result>( d.vkCreateRenderPass( m_device, reinterpret_cast<const VkRenderPassCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkRenderPass*>( &renderPass ) ) );
@@ -63798,13 +83334,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateRenderPass2( m_device, reinterpret_cast<const VkRenderPassCreateInfo2*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkRenderPass*>( pRenderPass ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type Device::createRenderPass2( const RenderPassCreateInfo2 & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type Device::createRenderPass2( const RenderPassCreateInfo2 & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::RenderPass renderPass;
Result result = static_cast<Result>( d.vkCreateRenderPass2( m_device, reinterpret_cast<const VkRenderPassCreateInfo2*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkRenderPass*>( &renderPass ) ) );
@@ -63812,7 +83348,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<RenderPass,Dispatch>>::type Device::createRenderPass2Unique( const RenderPassCreateInfo2 & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<RenderPass,Dispatch>>::type Device::createRenderPass2Unique( const RenderPassCreateInfo2 & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::RenderPass renderPass;
Result result = static_cast<Result>( d.vkCreateRenderPass2( m_device, reinterpret_cast<const VkRenderPassCreateInfo2*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkRenderPass*>( &renderPass ) ) );
@@ -63824,13 +83360,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateRenderPass2KHR( m_device, reinterpret_cast<const VkRenderPassCreateInfo2*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkRenderPass*>( pRenderPass ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type Device::createRenderPass2KHR( const RenderPassCreateInfo2 & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type Device::createRenderPass2KHR( const RenderPassCreateInfo2 & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::RenderPass renderPass;
Result result = static_cast<Result>( d.vkCreateRenderPass2KHR( m_device, reinterpret_cast<const VkRenderPassCreateInfo2*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkRenderPass*>( &renderPass ) ) );
@@ -63838,7 +83374,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<RenderPass,Dispatch>>::type Device::createRenderPass2KHRUnique( const RenderPassCreateInfo2 & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<RenderPass,Dispatch>>::type Device::createRenderPass2KHRUnique( const RenderPassCreateInfo2 & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::RenderPass renderPass;
Result result = static_cast<Result>( d.vkCreateRenderPass2KHR( m_device, reinterpret_cast<const VkRenderPassCreateInfo2*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkRenderPass*>( &renderPass ) ) );
@@ -63850,13 +83386,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Sampler* pSampler, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Sampler* pSampler, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateSampler( m_device, reinterpret_cast<const VkSamplerCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSampler*>( pSampler ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Sampler>::type Device::createSampler( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Sampler>::type Device::createSampler( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::Sampler sampler;
Result result = static_cast<Result>( d.vkCreateSampler( m_device, reinterpret_cast<const VkSamplerCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSampler*>( &sampler ) ) );
@@ -63864,7 +83400,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Sampler,Dispatch>>::type Device::createSamplerUnique( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Sampler,Dispatch>>::type Device::createSamplerUnique( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::Sampler sampler;
Result result = static_cast<Result>( d.vkCreateSampler( m_device, reinterpret_cast<const VkSamplerCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSampler*>( &sampler ) ) );
@@ -63876,13 +83412,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
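// Editor's note (not part of this diff): VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS expands
// to [[nodiscard]] only when the header is compiled with VULKAN_HPP_NO_EXCEPTIONS (and a
// C++17 compiler). In that configuration the enhanced-mode wrappers cannot throw, so the
// vk::Result travels in the return value and must not be ignored. A minimal sketch,
// assuming a no-exceptions build, a vk::Device `device`, and a filled
// vk::SamplerCreateInfo `samplerInfo`:
auto rv = device.createSampler( samplerInfo );   // vk::ResultValue<vk::Sampler> in this build
if ( rv.result == vk::Result::eSuccess )
{
  vk::Sampler sampler = rv.value;
  // ... use sampler; destroy it later with device.destroySampler( sampler )
}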
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSamplerYcbcrConversion*>( pYcbcrConversion ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type Device::createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type Device::createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
Result result = static_cast<Result>( d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSamplerYcbcrConversion*>( &ycbcrConversion ) ) );
@@ -63890,7 +83426,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SamplerYcbcrConversion,Dispatch>>::type Device::createSamplerYcbcrConversionUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SamplerYcbcrConversion,Dispatch>>::type Device::createSamplerYcbcrConversionUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
Result result = static_cast<Result>( d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSamplerYcbcrConversion*>( &ycbcrConversion ) ) );
@@ -63902,13 +83438,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateSamplerYcbcrConversionKHR( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSamplerYcbcrConversion*>( pYcbcrConversion ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type Device::createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type Device::createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
Result result = static_cast<Result>( d.vkCreateSamplerYcbcrConversionKHR( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSamplerYcbcrConversion*>( &ycbcrConversion ) ) );
@@ -63916,7 +83452,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SamplerYcbcrConversion,Dispatch>>::type Device::createSamplerYcbcrConversionKHRUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SamplerYcbcrConversion,Dispatch>>::type Device::createSamplerYcbcrConversionKHRUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
Result result = static_cast<Result>( d.vkCreateSamplerYcbcrConversionKHR( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSamplerYcbcrConversion*>( &ycbcrConversion ) ) );
@@ -63928,13 +83464,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Semaphore* pSemaphore, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Semaphore* pSemaphore, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateSemaphore( m_device, reinterpret_cast<const VkSemaphoreCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSemaphore*>( pSemaphore ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Semaphore>::type Device::createSemaphore( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Semaphore>::type Device::createSemaphore( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::Semaphore semaphore;
Result result = static_cast<Result>( d.vkCreateSemaphore( m_device, reinterpret_cast<const VkSemaphoreCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSemaphore*>( &semaphore ) ) );
@@ -63942,7 +83478,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Semaphore,Dispatch>>::type Device::createSemaphoreUnique( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Semaphore,Dispatch>>::type Device::createSemaphoreUnique( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::Semaphore semaphore;
Result result = static_cast<Result>( d.vkCreateSemaphore( m_device, reinterpret_cast<const VkSemaphoreCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSemaphore*>( &semaphore ) ) );
@@ -63954,13 +83490,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ShaderModule* pShaderModule, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ShaderModule* pShaderModule, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateShaderModule( m_device, reinterpret_cast<const VkShaderModuleCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkShaderModule*>( pShaderModule ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ShaderModule>::type Device::createShaderModule( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ShaderModule>::type Device::createShaderModule( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::ShaderModule shaderModule;
Result result = static_cast<Result>( d.vkCreateShaderModule( m_device, reinterpret_cast<const VkShaderModuleCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkShaderModule*>( &shaderModule ) ) );
@@ -63968,7 +83504,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<ShaderModule,Dispatch>>::type Device::createShaderModuleUnique( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<ShaderModule,Dispatch>>::type Device::createShaderModuleUnique( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::ShaderModule shaderModule;
Result result = static_cast<Result>( d.vkCreateShaderModule( m_device, reinterpret_cast<const VkShaderModuleCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkShaderModule*>( &shaderModule ) ) );
@@ -63980,73 +83516,71 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createSharedSwapchainsKHR( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSharedSwapchainsKHR( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, swapchainCount, reinterpret_cast<const VkSwapchainCreateInfoKHR*>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSwapchainKHR*>( pSwapchains ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<SwapchainKHR,Allocator>>::type Device::createSharedSwapchainsKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<SwapchainKHR,Allocator>>::type Device::createSharedSwapchainsKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const &createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
std::vector<SwapchainKHR,Allocator> swapchains( createInfos.size() );
Result result = static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast<const VkSwapchainCreateInfoKHR*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( swapchains.data() ) ) );
return createResultValue( result, swapchains, VULKAN_HPP_NAMESPACE_STRING"::Device::createSharedSwapchainsKHR" );
}
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<SwapchainKHR,Allocator>>::type Device::createSharedSwapchainsKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, SwapchainKHR>::value, int>::type>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<SwapchainKHR,Allocator>>::type Device::createSharedSwapchainsKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const &createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<SwapchainKHR,Allocator> swapchains( createInfos.size(), vectorAllocator );
Result result = static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast<const VkSwapchainCreateInfoKHR*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( swapchains.data() ) ) );
return createResultValue( result, swapchains, VULKAN_HPP_NAMESPACE_STRING"::Device::createSharedSwapchainsKHR" );
}
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<SwapchainKHR>::type Device::createSharedSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<SwapchainKHR>::type Device::createSharedSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
SwapchainKHR swapchain;
Result result = static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, 1 , reinterpret_cast<const VkSwapchainCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( &swapchain ) ) );
return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING"::Device::createSharedSwapchainKHR" );
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR,Dispatch>,Allocator>>::type Device::createSharedSwapchainsKHRUnique( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
- {
- static_assert( sizeof( SwapchainKHR ) <= sizeof( UniqueHandle<SwapchainKHR, Dispatch> ), "SwapchainKHR is greater than UniqueHandle<SwapchainKHR, Dispatch>!" );
- std::vector<UniqueHandle<SwapchainKHR, Dispatch>, Allocator> swapchainKHRs;
- swapchainKHRs.reserve( createInfos.size() );
- SwapchainKHR* buffer = reinterpret_cast<SwapchainKHR*>( reinterpret_cast<char*>( swapchainKHRs.data() ) + createInfos.size() * ( sizeof( UniqueHandle<SwapchainKHR, Dispatch> ) - sizeof( SwapchainKHR ) ) );
- Result result = static_cast<Result>(d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast<const VkSwapchainCreateInfoKHR*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( buffer ) ) );
+ template<typename Dispatch, typename Allocator >
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR,Dispatch>,Allocator>>::type Device::createSharedSwapchainsKHRUnique( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const &createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ std::vector<UniqueHandle<SwapchainKHR, Dispatch>, Allocator> uniqueSwapchainKHRs;
+ std::vector<SwapchainKHR> swapchainKHRs( createInfos.size() );
+ Result result = static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast<const VkSwapchainCreateInfoKHR*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>(swapchainKHRs.data()) ) );
if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
+ uniqueSwapchainKHRs.reserve( createInfos.size() );
ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
for ( size_t i=0 ; i<createInfos.size() ; i++ )
{
- swapchainKHRs.push_back( UniqueHandle<SwapchainKHR, Dispatch>( buffer[i], deleter ) );
+ uniqueSwapchainKHRs.push_back( UniqueHandle<SwapchainKHR, Dispatch>( swapchainKHRs[i], deleter ) );
}
}
- return createResultValue( result, swapchainKHRs, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" );
+ return createResultValue( result, std::move( uniqueSwapchainKHRs ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" );
}
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR,Dispatch>,Allocator>>::type Device::createSharedSwapchainsKHRUnique( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
+ template<typename Dispatch, typename Allocator , typename B, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<SwapchainKHR, Dispatch>>::value, int>::type>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR,Dispatch>,Allocator>>::type Device::createSharedSwapchainsKHRUnique( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const &createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
{
- static_assert( sizeof( SwapchainKHR ) <= sizeof( UniqueHandle<SwapchainKHR, Dispatch> ), "SwapchainKHR is greater than UniqueHandle<SwapchainKHR, Dispatch>!" );
- std::vector<UniqueHandle<SwapchainKHR, Dispatch>, Allocator> swapchainKHRs( vectorAllocator );
- swapchainKHRs.reserve( createInfos.size() );
- SwapchainKHR* buffer = reinterpret_cast<SwapchainKHR*>( reinterpret_cast<char*>( swapchainKHRs.data() ) + createInfos.size() * ( sizeof( UniqueHandle<SwapchainKHR, Dispatch> ) - sizeof( SwapchainKHR ) ) );
- Result result = static_cast<Result>(d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast<const VkSwapchainCreateInfoKHR*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( buffer ) ) );
+ std::vector<UniqueHandle<SwapchainKHR, Dispatch>, Allocator> uniqueSwapchainKHRs( vectorAllocator );
+ std::vector<SwapchainKHR> swapchainKHRs( createInfos.size() );
+ Result result = static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast<const VkSwapchainCreateInfoKHR*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>(swapchainKHRs.data()) ) );
if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
+ uniqueSwapchainKHRs.reserve( createInfos.size() );
ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
for ( size_t i=0 ; i<createInfos.size() ; i++ )
{
- swapchainKHRs.push_back( UniqueHandle<SwapchainKHR, Dispatch>( buffer[i], deleter ) );
+ uniqueSwapchainKHRs.push_back( UniqueHandle<SwapchainKHR, Dispatch>( swapchainKHRs[i], deleter ) );
}
}
- return createResultValue( result, swapchainKHRs, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" );
+ return createResultValue( result, std::move( uniqueSwapchainKHRs ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" );
}
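Editor's note (illustrative only, not part of the patch): the reworked unique-handle overload above now fills a plain std::vector of SwapchainKHR and wraps each element, instead of aliasing the UniqueHandle storage. A hedged usage sketch follows; the helper name is a placeholder and the create infos are assumed to be filled in elsewhere, with exceptions and smart handles enabled.

    #include <vulkan/vulkan.hpp>
    #include <vector>

    // Hedged sketch: each returned UniqueHandle owns its VkSwapchainKHR and
    // destroys it automatically when it goes out of scope.
    std::vector<vk::UniqueSwapchainKHR> makeSharedSwapchains(
        vk::Device device, std::vector<vk::SwapchainCreateInfoKHR> const & createInfos )
    {
      return device.createSharedSwapchainsKHRUnique( createInfos );
    }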
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SwapchainKHR,Dispatch>>::type Device::createSharedSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SwapchainKHR,Dispatch>>::type Device::createSharedSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
SwapchainKHR swapchain;
Result result = static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, 1 , reinterpret_cast<const VkSwapchainCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( &swapchain ) ) );
@@ -64058,13 +83592,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchain, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchain, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateSwapchainKHR( m_device, reinterpret_cast<const VkSwapchainCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSwapchainKHR*>( pSwapchain ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type Device::createSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type Device::createSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
Result result = static_cast<Result>( d.vkCreateSwapchainKHR( m_device, reinterpret_cast<const VkSwapchainCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( &swapchain ) ) );
@@ -64072,7 +83606,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SwapchainKHR,Dispatch>>::type Device::createSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SwapchainKHR,Dispatch>>::type Device::createSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
Result result = static_cast<Result>( d.vkCreateSwapchainKHR( m_device, reinterpret_cast<const VkSwapchainCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( &swapchain ) ) );
@@ -64110,13 +83644,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT* pNameInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT* pNameInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT*>( pNameInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT*>( &nameInfo ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::debugMarkerSetObjectNameEXT" );
@@ -64124,42 +83658,84 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT* pTagInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT* pTagInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT*>( pTagInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT*>( &tagInfo ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::debugMarkerSetObjectTagEXT" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ return static_cast<Result>( d.vkDeferredOperationJoinKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
+ }
+#else
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d ) const
{
- d.vkDestroyAccelerationStructureNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ Result result = static_cast<Result>( d.vkDeferredOperationJoinKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::deferredOperationJoinKHR", { Result::eSuccess, Result::eThreadDoneKHR, Result::eThreadIdleKHR } );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkDestroyAccelerationStructureKHR( m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyAccelerationStructureNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ d.vkDestroyAccelerationStructureKHR( m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyAccelerationStructureNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ d.vkDestroyAccelerationStructureNV( m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyAccelerationStructureNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ d.vkDestroyAccelerationStructureNV( m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ d.vkDestroyAccelerationStructureKHR( m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+#else
+ d.vkDestroyAccelerationStructureNV( m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ d.vkDestroyAccelerationStructureKHR( m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+#else
+ d.vkDestroyAccelerationStructureNV( m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
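Editor's note (illustrative only, not part of the patch): the generic destroy() overload above routes to vkDestroyAccelerationStructureKHR when VK_ENABLE_BETA_EXTENSIONS is defined and falls back to vkDestroyAccelerationStructureNV otherwise. A hedged sketch of a caller; the helper name is a placeholder, and the relevant extension functions are assumed to be loaded in the default dispatcher.

    #include <vulkan/vulkan.hpp>

    // Hedged sketch: passing no allocator uses null allocation callbacks.
    void releaseAccelerationStructure( vk::Device device,
                                       vk::AccelerationStructureKHR accelerationStructure )
    {
      device.destroy( accelerationStructure );  // KHR or NV entry point, per the #ifdef above
    }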
@@ -64241,6 +83817,36 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkDestroyDeferredOperationKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkDestroyDeferredOperationKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkDestroyDeferredOperationKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkDestroyDeferredOperationKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
template<typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
@@ -64307,28 +83913,28 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ d.vkDestroyDescriptorUpdateTemplateKHR( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ d.vkDestroyDescriptorUpdateTemplateKHR( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyDescriptorUpdateTemplateKHR( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyDescriptorUpdateTemplateKHR( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -64476,54 +84082,28 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNVX( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast<VkIndirectCommandsLayoutNVX>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ d.vkDestroyIndirectCommandsLayoutNV( m_device, static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNVX( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast<VkIndirectCommandsLayoutNVX>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ d.vkDestroyIndirectCommandsLayoutNV( m_device, static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast<VkIndirectCommandsLayoutNVX>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ d.vkDestroyIndirectCommandsLayoutNV( m_device, static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast<VkIndirectCommandsLayoutNVX>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
- }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyObjectTableNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
- {
- d.vkDestroyObjectTableNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
- }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyObjectTableNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
- {
- d.vkDestroyObjectTableNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
- }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
- {
- d.vkDestroyObjectTableNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
- }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
- {
- d.vkDestroyObjectTableNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ d.vkDestroyIndirectCommandsLayoutNV( m_device, static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -64606,6 +84186,32 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkDestroyPrivateDataSlotEXT( m_device, static_cast<VkPrivateDataSlotEXT>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkDestroyPrivateDataSlotEXT( m_device, static_cast<VkPrivateDataSlotEXT>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkDestroyPrivateDataSlotEXT( m_device, static_cast<VkPrivateDataSlotEXT>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkDestroyPrivateDataSlotEXT( m_device, static_cast<VkPrivateDataSlotEXT>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
@@ -64697,28 +84303,28 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ d.vkDestroySamplerYcbcrConversionKHR( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ d.vkDestroySamplerYcbcrConversionKHR( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroySamplerYcbcrConversionKHR( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroySamplerYcbcrConversionKHR( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -64828,13 +84434,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::waitIdle(Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitIdle(Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkDeviceWaitIdle( m_device ) );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::waitIdle(Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::waitIdle(Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkDeviceWaitIdle( m_device ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::waitIdle" );
@@ -64856,13 +84462,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::flushMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange* pMemoryRanges, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::flushMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange* pMemoryRanges, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkFlushMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange*>( pMemoryRanges ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::flushMappedMemoryRanges( ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> memoryRanges, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::flushMappedMemoryRanges( ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const &memoryRanges, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkFlushMappedMemoryRanges( m_device, memoryRanges.size() , reinterpret_cast<const VkMappedMemoryRange*>( memoryRanges.data() ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::flushMappedMemoryRanges" );
@@ -64876,7 +84482,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const &commandBuffers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size() , reinterpret_cast<const VkCommandBuffer*>( commandBuffers.data() ) );
}
@@ -64889,7 +84495,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const &commandBuffers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size() , reinterpret_cast<const VkCommandBuffer*>( commandBuffers.data() ) );
}
@@ -64902,7 +84508,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const &descriptorSets, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size() , reinterpret_cast<const VkDescriptorSet*>( descriptorSets.data() ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::freeDescriptorSets" );
@@ -64916,7 +84522,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const &descriptorSets, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size() , reinterpret_cast<const VkDescriptorSet*>( descriptorSets.data() ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::free" );
@@ -64949,20 +84555,84 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, size_t dataSize, void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE DeviceAddress Device::getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkGetAccelerationStructureHandleNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), dataSize, pData ) );
+ return static_cast<DeviceAddress>( d.vkGetAccelerationStructureDeviceAddressKHR( m_device, reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR*>( pInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename T, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, ArrayProxy<T> data, Dispatch const &d ) const
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE DeviceAddress Device::getAccelerationStructureAddressKHR( const AccelerationStructureDeviceAddressInfoKHR & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ return d.vkGetAccelerationStructureDeviceAddressKHR( m_device, reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR*>( &info ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, size_t dataSize, void* pData, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ return static_cast<Result>( d.vkGetAccelerationStructureHandleNV( m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), dataSize, pData ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename T, typename Dispatch>
+ VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it.")
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, ArrayProxy<T> const &data, Dispatch const &d ) const
{
- Result result = static_cast<Result>( d.vkGetAccelerationStructureHandleNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), data.size() * sizeof( T ) , reinterpret_cast<void*>( data.data() ) ) );
+ Result result = static_cast<Result>( d.vkGetAccelerationStructureHandleNV( m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), data.size() * sizeof( T ) , reinterpret_cast<void*>( data.data() ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getAccelerationStructureHandleNV" );
+
+ }
+
+ template <typename T, typename Allocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<T,Allocator>>::type Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, size_t dataSize, Dispatch const &d ) const
+ {
+ VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 );
+ std::vector<T,Allocator> data( dataSize / sizeof( T ) );
+ Result result = static_cast<Result>( d.vkGetAccelerationStructureHandleNV( m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), data.size() * sizeof( T ) , reinterpret_cast<void*>( data.data() ) ) );
+
+ return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" );
+ }
+
+ template <typename T, typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<T>::type Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Dispatch const &d ) const
+ {
+ T data;
+ Result result = static_cast<Result>( d.vkGetAccelerationStructureHandleNV( m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), 1 * sizeof( T ) , reinterpret_cast<void*>( &data ) ) );
+
+ return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
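Editor's note (illustrative only, not part of the patch): besides the now-deprecated ArrayProxy flavour, the hunk above adds a vector-returning flavour and a single-value flavour of getAccelerationStructureHandleNV. A hedged sketch of the single-value flavour; the helper name is a placeholder and exceptions are assumed to be enabled, so failures throw vk::SystemError.

    #include <vulkan/vulkan.hpp>
    #include <cstdint>

    // Hedged sketch: returns the 8-byte NV handle directly instead of filling a span.
    uint64_t queryAccelerationStructureHandle( vk::Device device,
                                               vk::AccelerationStructureKHR accelerationStructure )
    {
      return device.getAccelerationStructureHandleNV<uint64_t>( accelerationStructure );
    }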
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::getAccelerationStructureMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoKHR* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkGetAccelerationStructureMemoryRequirementsKHR( m_device, reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoKHR*>( pInfo ), reinterpret_cast<VkMemoryRequirements2*>( pMemoryRequirements ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getAccelerationStructureMemoryRequirementsKHR( const AccelerationStructureMemoryRequirementsInfoKHR & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
+ d.vkGetAccelerationStructureMemoryRequirementsKHR( m_device, reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoKHR*>( &info ), reinterpret_cast<VkMemoryRequirements2*>( &memoryRequirements ) );
+ return memoryRequirements;
+ }
+ template<typename X, typename Y, typename ...Z, typename Dispatch>
+ VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getAccelerationStructureMemoryRequirementsKHR( const AccelerationStructureMemoryRequirementsInfoKHR & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ StructureChain<X, Y, Z...> structureChain;
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2& memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+ d.vkGetAccelerationStructureMemoryRequirementsKHR( m_device, reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoKHR*>( &info ), reinterpret_cast<VkMemoryRequirements2*>( &memoryRequirements ) );
+ return structureChain;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
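Editor's note (illustrative only, not part of the patch): the KHR memory-requirements query added above mirrors the NV one and also comes in a StructureChain flavour for chained outputs. A hedged sketch of the plain flavour; the helper name is a placeholder and VK_ENABLE_BETA_EXTENSIONS is assumed to be defined.

    #include <vulkan/vulkan.hpp>

    // Hedged sketch: returns MemoryRequirements2 by value; the StructureChain overload
    // is the one to use when extension structures must be chained to the result.
    vk::MemoryRequirements2 queryAccelerationStructureMemoryRequirements(
        vk::Device device, vk::AccelerationStructureMemoryRequirementsInfoKHR const & info )
    {
      return device.getAccelerationStructureMemoryRequirementsKHR( info );
    }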
template<typename Dispatch>
VULKAN_HPP_INLINE void Device::getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
@@ -64988,20 +84658,20 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_ANDROID_KHR
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer* buffer, VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer* buffer, VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID*>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>::type Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>::type Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID properties;
Result result = static_cast<Result>( d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID*>( &properties ) ) );
return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::Device::getAndroidHardwareBufferPropertiesANDROID" );
}
template<typename X, typename Y, typename ...Z, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d ) const
{
StructureChain<X, Y, Z...> structureChain;
VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID& properties = structureChain.template get<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>();
@@ -65138,13 +84808,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getCalibratedTimestampsEXT( uint32_t timestampCount, const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getCalibratedTimestampsEXT( uint32_t timestampCount, const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetCalibratedTimestampsEXT( m_device, timestampCount, reinterpret_cast<const VkCalibratedTimestampInfoEXT*>( pTimestampInfos ), pTimestamps, pMaxDeviation ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type Device::getCalibratedTimestampsEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> timestampInfos, ArrayProxy<uint64_t> timestamps, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type Device::getCalibratedTimestampsEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const &timestampInfos, ArrayProxy<uint64_t> const &timestamps, Dispatch const &d ) const
{
#ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( timestampInfos.size() == timestamps.size() );
@@ -65160,6 +84830,39 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE uint32_t Device::getDeferredOperationMaxConcurrencyKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ return d.vkGetDeferredOperationMaxConcurrencyKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE uint32_t Device::getDeferredOperationMaxConcurrencyKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ return d.vkGetDeferredOperationMaxConcurrencyKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ return static_cast<Result>( d.vkGetDeferredOperationResultKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkGetDeferredOperationResultKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getDeferredOperationResultKHR", { Result::eSuccess, Result::eNotReady } );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
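Editor's note (illustrative only, not part of the patch): deferredOperationJoinKHR above treats eThreadDoneKHR and eThreadIdleKHR as success codes, and getDeferredOperationResultKHR treats eNotReady the same way, so callers can poll without exceptions. A hedged, single-threaded sketch; the helper name is a placeholder, and a real caller would join from up to getDeferredOperationMaxConcurrencyKHR threads.

    #include <vulkan/vulkan.hpp>

    // Hedged sketch: drives a deferred host operation on one thread
    // (requires VK_ENABLE_BETA_EXTENSIONS).
    vk::Result driveDeferredOperation( vk::Device device, vk::DeferredOperationKHR operation )
    {
      vk::Result joinResult;
      do
      {
        joinResult = device.deferredOperationJoinKHR( operation );  // eSuccess / eThreadDoneKHR / eThreadIdleKHR
      } while ( joinResult == vk::Result::eThreadIdleKHR );
      // May still report eNotReady if other joined threads were expected to finish the work.
      return device.getDeferredOperationResultKHR( operation );
    }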
template<typename Dispatch>
VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport* pSupport, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
@@ -65206,6 +84909,22 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionKHR* version, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ return static_cast<Result>( d.vkGetDeviceAccelerationStructureCompatibilityKHR( m_device, reinterpret_cast<const VkAccelerationStructureVersionKHR*>( version ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::getAccelerationStructureCompatibilityKHR( const AccelerationStructureVersionKHR & version, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkGetDeviceAccelerationStructureCompatibilityKHR( m_device, reinterpret_cast<const VkAccelerationStructureVersionKHR*>( &version ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getAccelerationStructureCompatibilityKHR" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
template<typename Dispatch>
VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
@@ -65237,13 +84956,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getGroupPresentCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupPresentCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR*>( pDeviceGroupPresentCapabilities ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::type Device::getGroupPresentCapabilitiesKHR(Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::type Device::getGroupPresentCapabilitiesKHR(Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR deviceGroupPresentCapabilities;
Result result = static_cast<Result>( d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR*>( &deviceGroupPresentCapabilities ) ) );
@@ -65253,13 +84972,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_WIN32_KHR
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModes2EXT( m_device, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( pSurfaceInfo ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR*>( pModes ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type Device::getGroupSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type Device::getGroupSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes;
Result result = static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModes2EXT( m_device, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( &surfaceInfo ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR*>( &modes ) ) );
@@ -65269,13 +84988,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModesKHR( m_device, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR*>( pModes ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes;
Result result = static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModesKHR( m_device, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR*>( &modes ) ) );
@@ -65369,13 +85088,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getEventStatus", { Result::eEventSet, Result::eEventReset } );
@@ -65383,13 +85102,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR*>( pGetFdInfo ), pFd ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<int>::type Device::getFenceFdKHR( const FenceGetFdInfoKHR & getFdInfo, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<int>::type Device::getFenceFdKHR( const FenceGetFdInfoKHR & getFdInfo, Dispatch const &d ) const
{
int fd;
Result result = static_cast<Result>( d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR*>( &getFdInfo ), &fd ) );
@@ -65399,13 +85118,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getFenceStatus", { Result::eSuccess, Result::eNotReady } );
@@ -65414,13 +85133,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_WIN32_KHR
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR*>( pGetWin32HandleInfo ), pHandle ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type Device::getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type Device::getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d ) const
{
HANDLE handle;
Result result = static_cast<Result>( d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR*>( &getWin32HandleInfo ), &handle ) );
@@ -65430,6 +85149,29 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device, reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV*>( pInfo ), reinterpret_cast<VkMemoryRequirements2*>( pMemoryRequirements ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getGeneratedCommandsMemoryRequirementsNV( const GeneratedCommandsMemoryRequirementsInfoNV & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
+ d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device, reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV*>( &info ), reinterpret_cast<VkMemoryRequirements2*>( &memoryRequirements ) );
+ return memoryRequirements;
+ }
+ template<typename X, typename Y, typename ...Z, typename Dispatch>
+ VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getGeneratedCommandsMemoryRequirementsNV( const GeneratedCommandsMemoryRequirementsInfoNV & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ StructureChain<X, Y, Z...> structureChain;
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2& memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+ d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device, reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV*>( &info ), reinterpret_cast<VkMemoryRequirements2*>( &memoryRequirements ) );
+ return structureChain;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
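// Illustrative usage sketch (not from the generated header): calling the new
// Device::getGeneratedCommandsMemoryRequirementsNV wrapper added above, assuming
// vulkan.hpp is included, exceptions are enabled, and the device, pipeline and
// indirectCommandsLayout handles were created elsewhere (all names are placeholders).
static vk::MemoryRequirements2 queryGeneratedCommandsMemory( vk::Device                   device,
                                                             vk::Pipeline                 pipeline,
                                                             vk::IndirectCommandsLayoutNV indirectCommandsLayout )
{
  vk::GeneratedCommandsMemoryRequirementsInfoNV info;
  info.pipelineBindPoint      = vk::PipelineBindPoint::eGraphics;
  info.pipeline               = pipeline;
  info.indirectCommandsLayout = indirectCommandsLayout;
  info.maxSequencesCount      = 1;
  // Enhanced-mode overload: fills and returns MemoryRequirements2 by value, no Result to check.
  return device.getGeneratedCommandsMemoryRequirementsNV( info );
}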
+ template<typename Dispatch>
VULKAN_HPP_INLINE Result Device::getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetImageDrmFormatModifierPropertiesEXT( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT*>( pProperties ) ) );
@@ -65511,7 +85253,7 @@ namespace VULKAN_HPP_NAMESPACE
d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), pSparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements*>( pSparseMemoryRequirements ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator, typename Dispatch>
+ template<typename Allocator , typename Dispatch>
VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements,Allocator> Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const &d ) const
{
std::vector<SparseImageMemoryRequirements,Allocator> sparseMemoryRequirements;
@@ -65521,7 +85263,7 @@ namespace VULKAN_HPP_NAMESPACE
d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements*>( sparseMemoryRequirements.data() ) );
return sparseMemoryRequirements;
}
- template<typename Allocator, typename Dispatch>
+ template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, SparseImageMemoryRequirements>::value, int>::type>
VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements,Allocator> Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<SparseImageMemoryRequirements,Allocator> sparseMemoryRequirements( vectorAllocator );
@@ -65539,7 +85281,7 @@ namespace VULKAN_HPP_NAMESPACE
d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( pInfo ), pSparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2*>( pSparseMemoryRequirements ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator, typename Dispatch>
+ template<typename Allocator , typename Dispatch>
VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2,Allocator> Device::getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const &d ) const
{
std::vector<SparseImageMemoryRequirements2,Allocator> sparseMemoryRequirements;
@@ -65549,7 +85291,7 @@ namespace VULKAN_HPP_NAMESPACE
d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( &info ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2*>( sparseMemoryRequirements.data() ) );
return sparseMemoryRequirements;
}
- template<typename Allocator, typename Dispatch>
+ template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, SparseImageMemoryRequirements2>::value, int>::type>
VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2,Allocator> Device::getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<SparseImageMemoryRequirements2,Allocator> sparseMemoryRequirements( vectorAllocator );
@@ -65567,7 +85309,7 @@ namespace VULKAN_HPP_NAMESPACE
d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( pInfo ), pSparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2*>( pSparseMemoryRequirements ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator, typename Dispatch>
+ template<typename Allocator , typename Dispatch>
VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2,Allocator> Device::getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const &d ) const
{
std::vector<SparseImageMemoryRequirements2,Allocator> sparseMemoryRequirements;
@@ -65577,7 +85319,7 @@ namespace VULKAN_HPP_NAMESPACE
d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( &info ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2*>( sparseMemoryRequirements.data() ) );
return sparseMemoryRequirements;
}
- template<typename Allocator, typename Dispatch>
+ template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, SparseImageMemoryRequirements2>::value, int>::type>
VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2,Allocator> Device::getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<SparseImageMemoryRequirements2,Allocator> sparseMemoryRequirements( vectorAllocator );
@@ -65605,6 +85347,21 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ return static_cast<Result>( d.vkGetImageViewAddressNVX( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<VkImageViewAddressPropertiesNVX*>( pProperties ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX>::type Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, Dispatch const &d ) const
+ {
+ VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX properties;
+ Result result = static_cast<Result>( d.vkGetImageViewAddressNVX( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<VkImageViewAddressPropertiesNVX*>( &properties ) ) );
+ return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::Device::getImageViewAddressNVX" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
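// Illustrative usage sketch (not from the generated header) for the new
// Device::getImageViewAddressNVX wrapper added above. With enhanced mode and
// exceptions enabled a failing call throws, so the returned properties can be
// used directly; device and imageView are placeholder handles.
static vk::DeviceAddress queryImageViewAddress( vk::Device device, vk::ImageView imageView )
{
  vk::ImageViewAddressPropertiesNVX properties = device.getImageViewAddressNVX( imageView );
  return properties.deviceAddress;   // properties.size gives the addressable range
}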
+ template<typename Dispatch>
VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return d.vkGetImageViewHandleNVX( m_device, reinterpret_cast<const VkImageViewHandleInfoNVX*>( pInfo ) );
@@ -65619,13 +85376,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_ANDROID_KHR
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID*>( pInfo ), pBuffer ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<struct AHardwareBuffer*>::type Device::getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<struct AHardwareBuffer*>::type Device::getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const &d ) const
{
struct AHardwareBuffer* buffer;
Result result = static_cast<Result>( d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID*>( &info ), &buffer ) );
@@ -65635,13 +85392,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR*>( pGetFdInfo ), pFd ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<int>::type Device::getMemoryFdKHR( const MemoryGetFdInfoKHR & getFdInfo, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<int>::type Device::getMemoryFdKHR( const MemoryGetFdInfoKHR & getFdInfo, Dispatch const &d ) const
{
int fd;
Result result = static_cast<Result>( d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR*>( &getFdInfo ), &fd ) );
@@ -65650,13 +85407,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR* pMemoryFdProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR* pMemoryFdProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetMemoryFdPropertiesKHR( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), fd, reinterpret_cast<VkMemoryFdPropertiesKHR*>( pMemoryFdProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>::type Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>::type Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR memoryFdProperties;
Result result = static_cast<Result>( d.vkGetMemoryFdPropertiesKHR( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), fd, reinterpret_cast<VkMemoryFdPropertiesKHR*>( &memoryFdProperties ) ) );
@@ -65665,13 +85422,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetMemoryHostPointerPropertiesEXT( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), pHostPointer, reinterpret_cast<VkMemoryHostPointerPropertiesEXT*>( pMemoryHostPointerProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT>::type Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT>::type Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT memoryHostPointerProperties;
Result result = static_cast<Result>( d.vkGetMemoryHostPointerPropertiesEXT( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), pHostPointer, reinterpret_cast<VkMemoryHostPointerPropertiesEXT*>( &memoryHostPointerProperties ) ) );
@@ -65681,13 +85438,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_WIN32_KHR
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR*>( pGetWin32HandleInfo ), pHandle ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type Device::getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type Device::getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d ) const
{
HANDLE handle;
Result result = static_cast<Result>( d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR*>( &getWin32HandleInfo ), &handle ) );
@@ -65698,13 +85455,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_WIN32_KHR
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetMemoryWin32HandleNV( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), pHandle ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const &d ) const
{
HANDLE handle;
Result result = static_cast<Result>( d.vkGetMemoryWin32HandleNV( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), &handle ) );
@@ -65715,13 +85472,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_WIN32_KHR
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetMemoryWin32HandlePropertiesKHR( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), handle, reinterpret_cast<VkMemoryWin32HandlePropertiesKHR*>( pMemoryWin32HandleProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>::type Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>::type Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR memoryWin32HandleProperties;
Result result = static_cast<Result>( d.vkGetMemoryWin32HandlePropertiesKHR( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), handle, reinterpret_cast<VkMemoryWin32HandlePropertiesKHR*>( &memoryWin32HandleProperties ) ) );
@@ -65731,13 +85488,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE* pPresentationTimings, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE* pPresentationTimings, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), pPresentationTimingCount, reinterpret_cast<VkPastPresentationTimingGOOGLE*>( pPresentationTimings ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<PastPresentationTimingGOOGLE,Allocator>>::type Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PastPresentationTimingGOOGLE,Allocator>>::type Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const
{
std::vector<PastPresentationTimingGOOGLE,Allocator> presentationTimings;
uint32_t presentationTimingCount;
@@ -65758,8 +85515,8 @@ namespace VULKAN_HPP_NAMESPACE
}
return createResultValue( result, presentationTimings, VULKAN_HPP_NAMESPACE_STRING"::Device::getPastPresentationTimingGOOGLE" );
}
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<PastPresentationTimingGOOGLE,Allocator>>::type Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, PastPresentationTimingGOOGLE>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PastPresentationTimingGOOGLE,Allocator>>::type Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<PastPresentationTimingGOOGLE,Allocator> presentationTimings( vectorAllocator );
uint32_t presentationTimingCount;
@@ -65783,13 +85540,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, VULKAN_HPP_NAMESPACE::PerformanceValueINTEL* pValue, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, VULKAN_HPP_NAMESPACE::PerformanceValueINTEL* pValue, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPerformanceParameterINTEL( m_device, static_cast<VkPerformanceParameterTypeINTEL>( parameter ), reinterpret_cast<VkPerformanceValueINTEL*>( pValue ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL>::type Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL>::type Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::PerformanceValueINTEL value;
Result result = static_cast<Result>( d.vkGetPerformanceParameterINTEL( m_device, static_cast<VkPerformanceParameterTypeINTEL>( parameter ), reinterpret_cast<VkPerformanceValueINTEL*>( &value ) ) );
@@ -65798,13 +85555,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, size_t* pDataSize, void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, size_t* pDataSize, void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), pDataSize, pData ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t,Allocator>>::type Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t,Allocator>>::type Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const &d ) const
{
std::vector<uint8_t,Allocator> data;
size_t dataSize;
@@ -65825,8 +85582,8 @@ namespace VULKAN_HPP_NAMESPACE
}
return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineCacheData" );
}
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t,Allocator>>::type Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Allocator const& vectorAllocator, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, uint8_t>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t,Allocator>>::type Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<uint8_t,Allocator> data( vectorAllocator );
size_t dataSize;
@@ -65850,13 +85607,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR* pInternalRepresentations, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR* pInternalRepresentations, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR*>( pExecutableInfo ), pInternalRepresentationCount, reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR*>( pInternalRepresentations ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<PipelineExecutableInternalRepresentationKHR,Allocator>>::type Device::getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PipelineExecutableInternalRepresentationKHR,Allocator>>::type Device::getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const &d ) const
{
std::vector<PipelineExecutableInternalRepresentationKHR,Allocator> internalRepresentations;
uint32_t internalRepresentationCount;
@@ -65877,8 +85634,8 @@ namespace VULKAN_HPP_NAMESPACE
}
return createResultValue( result, internalRepresentations, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineExecutableInternalRepresentationsKHR" );
}
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<PipelineExecutableInternalRepresentationKHR,Allocator>>::type Device::getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, PipelineExecutableInternalRepresentationKHR>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PipelineExecutableInternalRepresentationKHR,Allocator>>::type Device::getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<PipelineExecutableInternalRepresentationKHR,Allocator> internalRepresentations( vectorAllocator );
uint32_t internalRepresentationCount;
@@ -65902,13 +85659,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast<const VkPipelineInfoKHR*>( pPipelineInfo ), pExecutableCount, reinterpret_cast<VkPipelineExecutablePropertiesKHR*>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<PipelineExecutablePropertiesKHR,Allocator>>::type Device::getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PipelineExecutablePropertiesKHR,Allocator>>::type Device::getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Dispatch const &d ) const
{
std::vector<PipelineExecutablePropertiesKHR,Allocator> properties;
uint32_t executableCount;
@@ -65929,8 +85686,8 @@ namespace VULKAN_HPP_NAMESPACE
}
return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineExecutablePropertiesKHR" );
}
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<PipelineExecutablePropertiesKHR,Allocator>>::type Device::getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, PipelineExecutablePropertiesKHR>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PipelineExecutablePropertiesKHR,Allocator>>::type Device::getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<PipelineExecutablePropertiesKHR,Allocator> properties( vectorAllocator );
uint32_t executableCount;
@@ -65954,13 +85711,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR* pStatistics, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR* pStatistics, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR*>( pExecutableInfo ), pStatisticCount, reinterpret_cast<VkPipelineExecutableStatisticKHR*>( pStatistics ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<PipelineExecutableStatisticKHR,Allocator>>::type Device::getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PipelineExecutableStatisticKHR,Allocator>>::type Device::getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const &d ) const
{
std::vector<PipelineExecutableStatisticKHR,Allocator> statistics;
uint32_t statisticCount;
@@ -65981,8 +85738,8 @@ namespace VULKAN_HPP_NAMESPACE
}
return createResultValue( result, statistics, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineExecutableStatisticsKHR" );
}
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<PipelineExecutableStatisticKHR,Allocator>>::type Device::getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, PipelineExecutableStatisticKHR>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PipelineExecutableStatisticKHR,Allocator>>::type Device::getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<PipelineExecutableStatisticKHR,Allocator> statistics( vectorAllocator );
uint32_t statisticCount;
@@ -66006,41 +85763,183 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, uint64_t* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkGetQueryPoolResults( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, dataSize, pData, static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) ) );
+ d.vkGetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType ), objectHandle, static_cast<VkPrivateDataSlotEXT>( privateDataSlot ), pData );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename T, typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, ArrayProxy<T> data, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d ) const
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE uint64_t Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- Result result = static_cast<Result>( d.vkGetQueryPoolResults( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, data.size() * sizeof( T ) , reinterpret_cast<void*>( data.data() ), static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) ) );
+ uint64_t data;
+ d.vkGetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType ), objectHandle, static_cast<VkPrivateDataSlotEXT>( privateDataSlot ), &data );
+ return data;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
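// Illustrative usage sketch (not from the generated header) for the new
// Device::getPrivateDataEXT wrapper added above. The enhanced-mode overload returns
// the stored 64-bit payload directly; the slot is assumed to have been created with
// createPrivateDataSlotEXT and the payload stored with setPrivateDataEXT beforehand.
static uint64_t readImagePrivateData( vk::Device device, uint64_t imageHandle, vk::PrivateDataSlotEXT slot )
{
  // objectType plus the raw object handle identify the object the payload was attached to.
  return device.getPrivateDataEXT( vk::ObjectType::eImage, imageHandle, slot );
}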
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ return static_cast<Result>( d.vkGetQueryPoolResults( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, dataSize, pData, static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename T, typename Dispatch>
+ VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it.")
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, ArrayProxy<T> const &data, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkGetQueryPoolResults( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, data.size() * sizeof( T ) , reinterpret_cast<void*>( data.data() ), static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getQueryPoolResults", { Result::eSuccess, Result::eNotReady } );
+
+ }
+
+ template <typename T, typename Allocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<T,Allocator>> Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d ) const
+ {
+ VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 );
+ std::vector<T,Allocator> data( dataSize / sizeof( T ) );
+ Result result = static_cast<Result>( d.vkGetQueryPoolResults( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, data.size() * sizeof( T ) , reinterpret_cast<void*>( data.data() ), static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) ) );
+
+ return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResults", { Result::eSuccess, Result::eNotReady } );
+ }
+
+ template <typename T, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<T> Device::getQueryPoolResult( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d ) const
+ {
+ T data;
+ Result result = static_cast<Result>( d.vkGetQueryPoolResults( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, 1 * sizeof( T ) , reinterpret_cast<void*>( &data ), static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) ) );
+
+ return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResults", { Result::eSuccess, Result::eNotReady } );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+
+
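// Illustrative usage sketch (not from the generated header) for the reworked
// Device::getQueryPoolResults overloads above: the ArrayProxy flavour is now deprecated,
// and a templated flavour returns the results as a std::vector<T>. dataSize is given in
// bytes, and a ResultValue is returned because eNotReady is a legitimate outcome. The
// single-element companion is Device::getQueryPoolResult<T>. Handles and counts below
// are placeholders.
static std::vector<uint64_t> readTimestampQueries( vk::Device device, vk::QueryPool queryPool, uint32_t queryCount )
{
  vk::ResultValue<std::vector<uint64_t>> rv =
    device.getQueryPoolResults<uint64_t>( queryPool, 0, queryCount,
                                          queryCount * sizeof( uint64_t ),   // dataSize in bytes
                                          sizeof( uint64_t ),                // stride
                                          vk::QueryResultFlagBits::e64 | vk::QueryResultFlagBits::eWait );
  // With eWait set, rv.result is eSuccess here; without it, check rv.result for eNotReady.
  return rv.value;
}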
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
+ return static_cast<Result>( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
}
+
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename T, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy<T> data, Dispatch const &d ) const
+ template <typename T, typename Dispatch>
+ VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it.")
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy<T> const &data, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( T ) , reinterpret_cast<void*>( data.data() ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getRayTracingCaptureReplayShaderGroupHandlesKHR" );
+
+ }
+
+ template <typename T, typename Allocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<T,Allocator>>::type Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const &d ) const
+ {
+ VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 );
+ std::vector<T,Allocator> data( dataSize / sizeof( T ) );
+ Result result = static_cast<Result>( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( T ) , reinterpret_cast<void*>( data.data() ) ) );
+
+ return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandlesKHR" );
+ }
+
+ template <typename T, typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<T>::type Device::getRayTracingCaptureReplayShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const &d ) const
+ {
+ T data;
+ Result result = static_cast<Result>( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, 1 * sizeof( T ) , reinterpret_cast<void*>( &data ) ) );
+
+ return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandlesKHR" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ return static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename T, typename Dispatch>
+ VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it.")
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy<T> const &data, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( T ) , reinterpret_cast<void*>( data.data() ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getRayTracingShaderGroupHandlesKHR" );
+
+ }
+
+ template <typename T, typename Allocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<T,Allocator>>::type Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const &d ) const
+ {
+ VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 );
+ std::vector<T,Allocator> data( dataSize / sizeof( T ) );
+ Result result = static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( T ) , reinterpret_cast<void*>( data.data() ) ) );
+
+ return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesKHR" );
+ }
+
+ template <typename T, typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<T>::type Device::getRayTracingShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const &d ) const
+ {
+ T data;
+ Result result = static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, 1 * sizeof( T ) , reinterpret_cast<void*>( &data ) ) );
+
+ return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesKHR" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+
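// Illustrative usage sketch (not from the generated header) for the new
// Device::getRayTracingShaderGroupHandlesKHR flavours above (a beta extension, so
// VK_ENABLE_BETA_EXTENSIONS must be defined). The vector flavour sizes its output from
// dataSize; handleSize would come from the physical-device ray-tracing properties, and
// the other handles are placeholders. The capture-replay and NV variants elsewhere in
// this hunk follow the same pattern.
static std::vector<uint8_t> readShaderGroupHandles( vk::Device device, vk::Pipeline pipeline,
                                                    uint32_t groupCount, uint32_t handleSize )
{
  // Returns groupCount * handleSize bytes of opaque shader group handles,
  // ready to be copied into a shader binding table buffer.
  return device.getRayTracingShaderGroupHandlesKHR<uint8_t>( pipeline, 0, groupCount,
                                                             size_t( groupCount ) * handleSize );
}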
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ return static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename T, typename Dispatch>
+ VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it.")
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy<T> const &data, Dispatch const &d ) const
{
- Result result = static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( T ) , reinterpret_cast<void*>( data.data() ) ) );
+ Result result = static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( T ) , reinterpret_cast<void*>( data.data() ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getRayTracingShaderGroupHandlesNV" );
+
+ }
+
+ template <typename T, typename Allocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<T,Allocator>>::type Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const &d ) const
+ {
+ VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 );
+ std::vector<T,Allocator> data( dataSize / sizeof( T ) );
+ Result result = static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( T ) , reinterpret_cast<void*>( data.data() ) ) );
+
+ return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesNV" );
+ }
+
+ template <typename T, typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<T>::type Device::getRayTracingShaderGroupHandleNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const &d ) const
+ {
+ T data;
+ Result result = static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, 1 * sizeof( T ) , reinterpret_cast<void*>( &data ) ) );
+
+ return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesNV" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE* pDisplayTimingProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE* pDisplayTimingProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetRefreshCycleDurationGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkRefreshCycleDurationGOOGLE*>( pDisplayTimingProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>::type Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>::type Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE displayTimingProperties;
Result result = static_cast<Result>( d.vkGetRefreshCycleDurationGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkRefreshCycleDurationGOOGLE*>( &displayTimingProperties ) ) );
@@ -66064,13 +85963,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t* pValue, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t* pValue, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const &d ) const
{
uint64_t value;
Result result = static_cast<Result>( d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), &value ) );
@@ -66079,13 +85978,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t* pValue, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t* pValue, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const &d ) const
{
uint64_t value;
Result result = static_cast<Result>( d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), &value ) );
@@ -66094,13 +85993,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR*>( pGetFdInfo ), pFd ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<int>::type Device::getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<int>::type Device::getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const &d ) const
{
int fd;
Result result = static_cast<Result>( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR*>( &getFdInfo ), &fd ) );
@@ -66110,13 +86009,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_WIN32_KHR
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR*>( pGetWin32HandleInfo ), pHandle ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type Device::getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type Device::getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d ) const
{
HANDLE handle;
Result result = static_cast<Result>( d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR*>( &getWin32HandleInfo ), &handle ) );
@@ -66126,13 +86025,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetShaderInfoAMD( m_device, static_cast<VkPipeline>( pipeline ), static_cast<VkShaderStageFlagBits>( shaderStage ), static_cast<VkShaderInfoTypeAMD>( infoType ), pInfoSize, pInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t,Allocator>>::type Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t,Allocator>>::type Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Dispatch const &d ) const
{
std::vector<uint8_t,Allocator> info;
size_t infoSize;
@@ -66153,8 +86052,8 @@ namespace VULKAN_HPP_NAMESPACE
}
return createResultValue( result, info, VULKAN_HPP_NAMESPACE_STRING"::Device::getShaderInfoAMD" );
}
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t,Allocator>>::type Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Allocator const& vectorAllocator, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, uint8_t>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t,Allocator>>::type Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<uint8_t,Allocator> info( vectorAllocator );
size_t infoSize;
@@ -66178,13 +86077,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
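Illustrative usage sketch (not part of the diff): the enhanced getShaderInfoAMD overloads wrap the usual size-query/fill two-call pattern and hand back a std::vector<uint8_t>; the extra overload constrained on typename B only exists so a caller can supply a custom allocator. Assumes exceptions are enabled (the default), valid device and pipeline handles, and VK_AMD_shader_info support; names are placeholders.

    #include <vulkan/vulkan.hpp>
    #include <vector>

    std::vector<uint8_t> fetchDisassembly( vk::Device device, vk::Pipeline pipeline )
    {
      // One call instead of calling vkGetShaderInfoAMD twice (size query, then fill).
      return device.getShaderInfoAMD( pipeline,
                                      vk::ShaderStageFlagBits::eFragment,
                                      vk::ShaderInfoTypeAMD::eDisassembly );
    }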
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetSwapchainCounterEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), pCounterValue ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, Dispatch const &d ) const
{
uint64_t counterValue;
Result result = static_cast<Result>( d.vkGetSwapchainCounterEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), &counterValue ) );
@@ -66193,13 +86092,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VULKAN_HPP_NAMESPACE::Image* pSwapchainImages, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VULKAN_HPP_NAMESPACE::Image* pSwapchainImages, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), pSwapchainImageCount, reinterpret_cast<VkImage*>( pSwapchainImages ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<Image,Allocator>>::type Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<Image,Allocator>>::type Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const
{
std::vector<Image,Allocator> swapchainImages;
uint32_t swapchainImageCount;
@@ -66220,8 +86119,8 @@ namespace VULKAN_HPP_NAMESPACE
}
return createResultValue( result, swapchainImages, VULKAN_HPP_NAMESPACE_STRING"::Device::getSwapchainImagesKHR" );
}
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<Image,Allocator>>::type Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, Image>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<Image,Allocator>>::type Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<Image,Allocator> swapchainImages( vectorAllocator );
uint32_t swapchainImageCount;
@@ -66246,13 +86145,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getSwapchainStatusKHR", { Result::eSuccess, Result::eSuboptimalKHR } );
@@ -66260,13 +86159,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, size_t* pDataSize, void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, size_t* pDataSize, void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), pDataSize, pData ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t,Allocator>>::type Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t,Allocator>>::type Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Dispatch const &d ) const
{
std::vector<uint8_t,Allocator> data;
size_t dataSize;
@@ -66287,8 +86186,8 @@ namespace VULKAN_HPP_NAMESPACE
}
return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING"::Device::getValidationCacheDataEXT" );
}
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t,Allocator>>::type Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Allocator const& vectorAllocator, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, uint8_t>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t,Allocator>>::type Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<uint8_t,Allocator> data( vectorAllocator );
size_t dataSize;
@@ -66312,13 +86211,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR* pImportFenceFdInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR* pImportFenceFdInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR*>( pImportFenceFdInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR*>( &importFenceFdInfo ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importFenceFdKHR" );
@@ -66327,13 +86226,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_WIN32_KHR
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR*>( pImportFenceWin32HandleInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR*>( &importFenceWin32HandleInfo ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importFenceWin32HandleKHR" );
@@ -66342,13 +86241,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR*>( pImportSemaphoreFdInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR*>( &importSemaphoreFdInfo ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importSemaphoreFdKHR" );
@@ -66357,13 +86256,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_WIN32_KHR
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR*>( pImportSemaphoreWin32HandleInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR*>( &importSemaphoreWin32HandleInfo ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importSemaphoreWin32HandleKHR" );
@@ -66372,13 +86271,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL* pInitializeInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL* pInitializeInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast<const VkInitializePerformanceApiInfoINTEL*>( pInitializeInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::initializePerformanceApiINTEL( const InitializePerformanceApiInfoINTEL & initializeInfo, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::initializePerformanceApiINTEL( const InitializePerformanceApiInfoINTEL & initializeInfo, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast<const VkInitializePerformanceApiInfoINTEL*>( &initializeInfo ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::initializePerformanceApiINTEL" );
@@ -66386,13 +86285,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::invalidateMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange* pMemoryRanges, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::invalidateMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange* pMemoryRanges, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkInvalidateMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange*>( pMemoryRanges ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::invalidateMappedMemoryRanges( ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> memoryRanges, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::invalidateMappedMemoryRanges( ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const &memoryRanges, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkInvalidateMappedMemoryRanges( m_device, memoryRanges.size() , reinterpret_cast<const VkMappedMemoryRange*>( memoryRanges.data() ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::invalidateMappedMemoryRanges" );
@@ -66400,13 +86299,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
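Illustrative usage sketch (not part of the diff): ArrayProxy parameters are now taken by const reference instead of by value, but existing call sites compile unchanged; braced lists, single elements, and std::vector still convert implicitly. Assumes exceptions are enabled and the handles are valid; names are placeholders.

    #include <vulkan/vulkan.hpp>

    void flushExample( vk::Device device, vk::Fence fence, vk::MappedMemoryRange range )
    {
      device.resetFences( { fence } );               // initializer list binds to ArrayProxy const &
      device.invalidateMappedMemoryRanges( range );  // single element binds to ArrayProxy const &
    }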
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, void** ppData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, void** ppData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkMapMemory( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( offset ), static_cast<VkDeviceSize>( size ), static_cast<VkMemoryMapFlags>( flags ), ppData ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void*>::type Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void*>::type Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, Dispatch const &d ) const
{
void* pData;
Result result = static_cast<Result>( d.vkMapMemory( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( offset ), static_cast<VkDeviceSize>( size ), static_cast<VkMemoryMapFlags>( flags ), &pData ) );
@@ -66415,13 +86314,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::PipelineCache* pSrcCaches, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::PipelineCache* pSrcCaches, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkMergePipelineCaches( m_device, static_cast<VkPipelineCache>( dstCache ), srcCacheCount, reinterpret_cast<const VkPipelineCache*>( pSrcCaches ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::PipelineCache> srcCaches, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::PipelineCache> const &srcCaches, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkMergePipelineCaches( m_device, static_cast<VkPipelineCache>( dstCache ), srcCaches.size() , reinterpret_cast<const VkPipelineCache*>( srcCaches.data() ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::mergePipelineCaches" );
@@ -66429,13 +86328,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::ValidationCacheEXT* pSrcCaches, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::ValidationCacheEXT* pSrcCaches, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkMergeValidationCachesEXT( m_device, static_cast<VkValidationCacheEXT>( dstCache ), srcCacheCount, reinterpret_cast<const VkValidationCacheEXT*>( pSrcCaches ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ValidationCacheEXT> srcCaches, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ValidationCacheEXT> const &srcCaches, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkMergeValidationCachesEXT( m_device, static_cast<VkValidationCacheEXT>( dstCache ), srcCaches.size() , reinterpret_cast<const VkValidationCacheEXT*>( srcCaches.data() ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::mergeValidationCachesEXT" );
@@ -66494,38 +86393,16 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::registerObjectsNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, uint32_t objectCount, const VULKAN_HPP_NAMESPACE::ObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
- {
- return static_cast<Result>( d.vkRegisterObjectsNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), objectCount, reinterpret_cast<const VkObjectTableEntryNVX* const*>( ppObjectTableEntries ), pObjectIndices ) );
- }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::registerObjectsNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, ArrayProxy<const VULKAN_HPP_NAMESPACE::ObjectTableEntryNVX* const> pObjectTableEntries, ArrayProxy<const uint32_t> objectIndices, Dispatch const &d ) const
- {
-#ifdef VULKAN_HPP_NO_EXCEPTIONS
- VULKAN_HPP_ASSERT( pObjectTableEntries.size() == objectIndices.size() );
-#else
- if ( pObjectTableEntries.size() != objectIndices.size() )
- {
- throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkDevice::registerObjectsNVX: pObjectTableEntries.size() != objectIndices.size()" );
- }
-#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
- Result result = static_cast<Result>( d.vkRegisterObjectsNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), pObjectTableEntries.size() , reinterpret_cast<const VkObjectTableEntryNVX* const*>( pObjectTableEntries.data() ), objectIndices.data() ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::registerObjectsNVX" );
- }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
#ifdef VK_USE_PLATFORM_WIN32_KHR
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::releaseFullScreenExclusiveModeEXT" );
@@ -66535,13 +86412,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::releasePerformanceConfigurationINTEL" );
@@ -66614,7 +86491,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::resetFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> fences, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::resetFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const &fences, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkResetFences( m_device, fences.size() , reinterpret_cast<const VkFence*>( fences.data() ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::resetFences" );
@@ -66650,13 +86527,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pNameInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pNameInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT*>( pNameInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT*>( &nameInfo ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::setDebugUtilsObjectNameEXT" );
@@ -66664,13 +86541,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT* pTagInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT* pTagInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT*>( pTagInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT*>( &tagInfo ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::setDebugUtilsObjectTagEXT" );
@@ -66679,13 +86556,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkSetEvent( m_device, static_cast<VkEvent>( event ) ) );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkSetEvent( m_device, static_cast<VkEvent>( event ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::setEvent" );
@@ -66699,7 +86576,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> swapchains, ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> metadata, Dispatch const &d ) const
+ VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const &swapchains, ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> const &metadata, Dispatch const &d ) const
{
#ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( swapchains.size() == metadata.size() );
@@ -66727,14 +86604,29 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo* pSignalInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE Result Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, uint64_t data, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ return static_cast<Result>( d.vkSetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType ), objectHandle, static_cast<VkPrivateDataSlotEXT>( privateDataSlot ), data ) );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, uint64_t data, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkSetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType ), objectHandle, static_cast<VkPrivateDataSlotEXT>( privateDataSlot ), data ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::setPrivateDataEXT" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
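Illustrative usage sketch (not part of the diff): the newly added VK_EXT_private_data wrapper attaches a 64-bit payload to any Vulkan object through a private data slot. Assumes the device was created with the extension enabled and that the slot already exists; names are placeholders.

    #include <vulkan/vulkan.hpp>
    #include <cstdint>

    void tagBuffer( vk::Device device, vk::PrivateDataSlotEXT slot, vk::Buffer buffer, uint64_t payload )
    {
      // objectHandle is the raw 64-bit handle value of the object being tagged.
      device.setPrivateDataEXT( vk::ObjectType::eBuffer,
                                uint64_t( static_cast<VkBuffer>( buffer ) ),
                                slot,
                                payload );
    }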
+ template<typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo* pSignalInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo*>( pSignalInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::signalSemaphore( const SemaphoreSignalInfo & signalInfo, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::signalSemaphore( const SemaphoreSignalInfo & signalInfo, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo*>( &signalInfo ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::signalSemaphore" );
@@ -66742,13 +86634,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo* pSignalInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo* pSignalInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkSignalSemaphoreKHR( m_device, reinterpret_cast<const VkSemaphoreSignalInfo*>( pSignalInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::signalSemaphoreKHR( const SemaphoreSignalInfo & signalInfo, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::signalSemaphoreKHR( const SemaphoreSignalInfo & signalInfo, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkSignalSemaphoreKHR( m_device, reinterpret_cast<const VkSemaphoreSignalInfo*>( &signalInfo ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::signalSemaphoreKHR" );
@@ -66811,28 +86703,6 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::unregisterObjectsNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, uint32_t objectCount, const VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
- {
- return static_cast<Result>( d.vkUnregisterObjectsNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), objectCount, reinterpret_cast<const VkObjectEntryTypeNVX*>( pObjectEntryTypes ), pObjectIndices ) );
- }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::unregisterObjectsNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, ArrayProxy<const VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX> objectEntryTypes, ArrayProxy<const uint32_t> objectIndices, Dispatch const &d ) const
- {
-#ifdef VULKAN_HPP_NO_EXCEPTIONS
- VULKAN_HPP_ASSERT( objectEntryTypes.size() == objectIndices.size() );
-#else
- if ( objectEntryTypes.size() != objectIndices.size() )
- {
- throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkDevice::unregisterObjectsNVX: objectEntryTypes.size() != objectIndices.size()" );
- }
-#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
- Result result = static_cast<Result>( d.vkUnregisterObjectsNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), objectEntryTypes.size() , reinterpret_cast<const VkObjectEntryTypeNVX*>( objectEntryTypes.data() ), objectIndices.data() ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::unregisterObjectsNVX" );
- }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
@@ -66868,20 +86738,20 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::updateDescriptorSets( ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> descriptorWrites, ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> descriptorCopies, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::updateDescriptorSets( ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const &descriptorWrites, ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> const &descriptorCopies, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkUpdateDescriptorSets( m_device, descriptorWrites.size() , reinterpret_cast<const VkWriteDescriptorSet*>( descriptorWrites.data() ), descriptorCopies.size() , reinterpret_cast<const VkCopyDescriptorSet*>( descriptorCopies.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::waitForFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence* pFences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitForFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence* pFences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkWaitForFences( m_device, fenceCount, reinterpret_cast<const VkFence*>( pFences ), static_cast<VkBool32>( waitAll ), timeout ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::waitForFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> fences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitForFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const &fences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkWaitForFences( m_device, fences.size() , reinterpret_cast<const VkFence*>( fences.data() ), static_cast<VkBool32>( waitAll ), timeout ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::waitForFences", { Result::eSuccess, Result::eTimeout } );
@@ -66889,13 +86759,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo* pWaitInfo, uint64_t timeout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo* pWaitInfo, uint64_t timeout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo*>( pWaitInfo ), timeout ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::waitSemaphores( const SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphores( const SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo*>( &waitInfo ), timeout ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::waitSemaphores", { Result::eSuccess, Result::eTimeout } );
@@ -66903,28 +86773,44 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo* pWaitInfo, uint64_t timeout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo* pWaitInfo, uint64_t timeout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkWaitSemaphoresKHR( m_device, reinterpret_cast<const VkSemaphoreWaitInfo*>( pWaitInfo ), timeout ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::waitSemaphoresKHR( const SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphoresKHR( const SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkWaitSemaphoresKHR( m_device, reinterpret_cast<const VkSemaphoreWaitInfo*>( &waitInfo ), timeout ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::waitSemaphoresKHR", { Result::eSuccess, Result::eTimeout } );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
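Illustrative usage sketch (not part of the diff): waitSemaphores keeps returning a vk::Result even in enhanced mode because eTimeout is an expected outcome; the added VULKAN_HPP_NODISCARD simply warns when that Result is dropped. Assumes a timeline semaphore created via vk::SemaphoreTypeCreateInfo; names are placeholders.

    #include <vulkan/vulkan.hpp>
    #include <cstdint>

    bool waitForTimelineValue( vk::Device device, vk::Semaphore semaphore, uint64_t value, uint64_t timeoutNs )
    {
      vk::SemaphoreWaitInfo waitInfo{};
      waitInfo.semaphoreCount = 1;
      waitInfo.pSemaphores    = &semaphore;
      waitInfo.pValues        = &value;
      return device.waitSemaphores( waitInfo, timeoutNs ) == vk::Result::eSuccess;  // eTimeout -> false
    }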
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template<typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t dataSize, void* pData, size_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ return static_cast<Result>( d.vkWriteAccelerationStructuresPropertiesKHR( m_device, accelerationStructureCount, reinterpret_cast<const VkAccelerationStructureKHR*>( pAccelerationStructures ), static_cast<VkQueryType>( queryType ), dataSize, pData, stride ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename T, typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::writeAccelerationStructuresPropertiesKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const &accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, ArrayProxy<T> const &data, size_t stride, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkWriteAccelerationStructuresPropertiesKHR( m_device, accelerationStructures.size() , reinterpret_cast<const VkAccelerationStructureKHR*>( accelerationStructures.data() ), static_cast<VkQueryType>( queryType ), data.size() * sizeof( T ) , reinterpret_cast<void*>( data.data() ), stride ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::writeAccelerationStructuresPropertiesKHR" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
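Illustrative usage sketch (not part of the diff): writeAccelerationStructuresPropertiesKHR is only compiled under VK_ENABLE_BETA_EXTENSIONS; the enhanced overload writes the queried properties into caller-provided storage described by ArrayProxy<T>. Assumes the provisional VK_KHR_ray_tracing extension is enabled, the structure was built with compaction allowed, and the dispatcher can resolve the entry point; names are placeholders.

    #ifdef VK_ENABLE_BETA_EXTENSIONS
    #include <vulkan/vulkan.hpp>
    #include <vector>

    vk::DeviceSize queryCompactedSize( vk::Device device, vk::AccelerationStructureKHR accel )
    {
      std::vector<vk::DeviceSize> sizes( 1 );
      device.writeAccelerationStructuresPropertiesKHR(
          accel,                                                    // single handle -> ArrayProxy
          vk::QueryType::eAccelerationStructureCompactedSizeKHR,
          vk::ArrayProxy<vk::DeviceSize>( sizes ),                  // destination storage
          sizeof( vk::DeviceSize ) );                               // stride between results
      return sizes[0];
    }
    #endif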
#ifdef VK_USE_PLATFORM_ANDROID_KHR
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Instance::createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
@@ -66932,7 +86818,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createAndroidSurfaceKHRUnique( const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createAndroidSurfaceKHRUnique( const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
@@ -66996,14 +86882,42 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifdef VK_USE_PLATFORM_DIRECTFB_EXT
+ template<typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ return static_cast<Result>( d.vkCreateDirectFBSurfaceEXT( m_instance, reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createDirectFBSurfaceEXT( const DirectFBSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+ Result result = static_cast<Result>( d.vkCreateDirectFBSurfaceEXT( m_instance, reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+ return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDirectFBSurfaceEXT" );
+ }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createDirectFBSurfaceEXTUnique( const DirectFBSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+ Result result = static_cast<Result>( d.vkCreateDirectFBSurfaceEXT( m_instance, reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+
+ ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
+ return createResultValue<SurfaceKHR,Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDirectFBSurfaceEXTUnique", deleter );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
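Illustrative usage sketch (not part of the diff): the new DirectFB surface factory mirrors the other platform surface wrappers, and the Unique variant ties the surface lifetime to the instance through vk::UniqueSurfaceKHR. Only meaningful when the header is built with VK_USE_PLATFORM_DIRECTFB_EXT and the instance enables VK_EXT_directfb_surface; the DirectFB pointers are assumed to come from the application.

    #ifdef VK_USE_PLATFORM_DIRECTFB_EXT
    #include <vulkan/vulkan.hpp>

    vk::UniqueSurfaceKHR makeDirectFBSurface( vk::Instance instance, IDirectFB * dfb, IDirectFBSurface * dfbSurface )
    {
      vk::DirectFBSurfaceCreateInfoEXT createInfo{};
      createInfo.dfb     = dfb;
      createInfo.surface = dfbSurface;
      return instance.createDirectFBSurfaceEXTUnique( createInfo );  // throws on failure
    }
    #endif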
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Instance::createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
@@ -67011,7 +86925,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createDisplayPlaneSurfaceKHRUnique( const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createDisplayPlaneSurfaceKHRUnique( const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
@@ -67023,13 +86937,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Instance::createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateHeadlessSurfaceEXT( m_instance, reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createHeadlessSurfaceEXT( const HeadlessSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createHeadlessSurfaceEXT( const HeadlessSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateHeadlessSurfaceEXT( m_instance, reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
@@ -67037,7 +86951,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createHeadlessSurfaceEXTUnique( const HeadlessSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createHeadlessSurfaceEXTUnique( const HeadlessSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateHeadlessSurfaceEXT( m_instance, reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
@@ -67050,13 +86964,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_IOS_MVK
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Instance::createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateIOSSurfaceMVK( m_instance, reinterpret_cast<const VkIOSSurfaceCreateInfoMVK*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateIOSSurfaceMVK( m_instance, reinterpret_cast<const VkIOSSurfaceCreateInfoMVK*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
@@ -67064,7 +86978,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createIOSSurfaceMVKUnique( const IOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createIOSSurfaceMVKUnique( const IOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateIOSSurfaceMVK( m_instance, reinterpret_cast<const VkIOSSurfaceCreateInfoMVK*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
@@ -67078,13 +86992,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_FUCHSIA
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Instance::createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance, reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createImagePipeSurfaceFUCHSIA( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createImagePipeSurfaceFUCHSIA( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance, reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
@@ -67092,7 +87006,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createImagePipeSurfaceFUCHSIAUnique( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createImagePipeSurfaceFUCHSIAUnique( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance, reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
@@ -67106,13 +87020,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_MACOS_MVK
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Instance::createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateMacOSSurfaceMVK( m_instance, reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateMacOSSurfaceMVK( m_instance, reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
@@ -67120,7 +87034,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createMacOSSurfaceMVKUnique( const MacOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createMacOSSurfaceMVKUnique( const MacOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateMacOSSurfaceMVK( m_instance, reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
@@ -67134,13 +87048,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_METAL_EXT
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Instance::createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateMetalSurfaceEXT( m_instance, reinterpret_cast<const VkMetalSurfaceCreateInfoEXT*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createMetalSurfaceEXT( const MetalSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createMetalSurfaceEXT( const MetalSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateMetalSurfaceEXT( m_instance, reinterpret_cast<const VkMetalSurfaceCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
@@ -67148,7 +87062,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createMetalSurfaceEXTUnique( const MetalSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createMetalSurfaceEXTUnique( const MetalSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateMetalSurfaceEXT( m_instance, reinterpret_cast<const VkMetalSurfaceCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
@@ -67162,13 +87076,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_GGP
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Instance::createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP( m_instance, reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createStreamDescriptorSurfaceGGP( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createStreamDescriptorSurfaceGGP( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP( m_instance, reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
@@ -67176,7 +87090,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createStreamDescriptorSurfaceGGPUnique( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createStreamDescriptorSurfaceGGPUnique( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP( m_instance, reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
@@ -67190,13 +87104,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_VI_NN
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Instance::createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateViSurfaceNN( m_instance, reinterpret_cast<const VkViSurfaceCreateInfoNN*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createViSurfaceNN( const ViSurfaceCreateInfoNN & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createViSurfaceNN( const ViSurfaceCreateInfoNN & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateViSurfaceNN( m_instance, reinterpret_cast<const VkViSurfaceCreateInfoNN*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
@@ -67204,7 +87118,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createViSurfaceNNUnique( const ViSurfaceCreateInfoNN & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createViSurfaceNNUnique( const ViSurfaceCreateInfoNN & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateViSurfaceNN( m_instance, reinterpret_cast<const VkViSurfaceCreateInfoNN*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
@@ -67218,13 +87132,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Instance::createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
@@ -67232,7 +87146,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createWaylandSurfaceKHRUnique( const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createWaylandSurfaceKHRUnique( const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
@@ -67246,13 +87160,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_WIN32_KHR
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Instance::createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast<const VkWin32SurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast<const VkWin32SurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
@@ -67260,7 +87174,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createWin32SurfaceKHRUnique( const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createWin32SurfaceKHRUnique( const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast<const VkWin32SurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
@@ -67274,13 +87188,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_XCB_KHR
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Instance::createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast<const VkXcbSurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast<const VkXcbSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
@@ -67288,7 +87202,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createXcbSurfaceKHRUnique( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createXcbSurfaceKHRUnique( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast<const VkXcbSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
@@ -67302,13 +87216,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_XLIB_KHR
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Instance::createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast<const VkXlibSurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast<const VkXlibSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
@@ -67316,7 +87230,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createXlibSurfaceKHRUnique( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createXlibSurfaceKHRUnique( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast<const VkXlibSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
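The hunks above mainly add VULKAN_HPP_NODISCARD to the Result-returning surface wrappers and VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS to the enhanced-mode ones, so silently dropping the return value can now warn. An illustrative sketch of the two flavours, using the headless-surface wrapper and assuming `instance` was created with VK_EXT_headless_surface enabled and the dispatcher has its entry point loaded:

    vk::HeadlessSurfaceCreateInfoEXT createInfo{};

    // pointer style: returns vk::Result, which is now marked nodiscard
    vk::SurfaceKHR surface;
    vk::Result result = instance.createHeadlessSurfaceEXT( &createInfo, nullptr, &surface );

    // enhanced mode: throws vk::SystemError on failure and returns the handle directly
    vk::SurfaceKHR surface2 = instance.createHeadlessSurfaceEXT( createInfo );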
@@ -67441,13 +87355,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroups( uint32_t* pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroups( uint32_t* pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, pPhysicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties*>( pPhysicalDeviceGroupProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type Instance::enumeratePhysicalDeviceGroups(Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type Instance::enumeratePhysicalDeviceGroups(Dispatch const &d ) const
{
std::vector<PhysicalDeviceGroupProperties,Allocator> physicalDeviceGroupProperties;
uint32_t physicalDeviceGroupCount;
@@ -67468,8 +87382,8 @@ namespace VULKAN_HPP_NAMESPACE
}
return createResultValue( result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDeviceGroups" );
}
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type Instance::enumeratePhysicalDeviceGroups(Allocator const& vectorAllocator, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceGroupProperties>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type Instance::enumeratePhysicalDeviceGroups(Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<PhysicalDeviceGroupProperties,Allocator> physicalDeviceGroupProperties( vectorAllocator );
uint32_t physicalDeviceGroupCount;
@@ -67493,13 +87407,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroupsKHR( uint32_t* pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroupsKHR( uint32_t* pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, pPhysicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties*>( pPhysicalDeviceGroupProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type Instance::enumeratePhysicalDeviceGroupsKHR(Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type Instance::enumeratePhysicalDeviceGroupsKHR(Dispatch const &d ) const
{
std::vector<PhysicalDeviceGroupProperties,Allocator> physicalDeviceGroupProperties;
uint32_t physicalDeviceGroupCount;
@@ -67520,8 +87434,8 @@ namespace VULKAN_HPP_NAMESPACE
}
return createResultValue( result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDeviceGroupsKHR" );
}
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type Instance::enumeratePhysicalDeviceGroupsKHR(Allocator const& vectorAllocator, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceGroupProperties>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type Instance::enumeratePhysicalDeviceGroupsKHR(Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<PhysicalDeviceGroupProperties,Allocator> physicalDeviceGroupProperties( vectorAllocator );
uint32_t physicalDeviceGroupCount;
@@ -67545,13 +87459,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDevices( uint32_t* pPhysicalDeviceCount, VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDevices( uint32_t* pPhysicalDeviceCount, VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, pPhysicalDeviceCount, reinterpret_cast<VkPhysicalDevice*>( pPhysicalDevices ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDevice,Allocator>>::type Instance::enumeratePhysicalDevices(Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDevice,Allocator>>::type Instance::enumeratePhysicalDevices(Dispatch const &d ) const
{
std::vector<PhysicalDevice,Allocator> physicalDevices;
uint32_t physicalDeviceCount;
@@ -67572,8 +87486,8 @@ namespace VULKAN_HPP_NAMESPACE
}
return createResultValue( result, physicalDevices, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDevices" );
}
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDevice,Allocator>>::type Instance::enumeratePhysicalDevices(Allocator const& vectorAllocator, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, PhysicalDevice>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDevice,Allocator>>::type Instance::enumeratePhysicalDevices(Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<PhysicalDevice,Allocator> physicalDevices( vectorAllocator );
uint32_t physicalDeviceCount;
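The enumerate wrappers follow the same pattern: the enhanced-mode overloads run the count/allocate/fill loop shown above and return the vector, while the raw overloads keep Vulkan's two-call protocol. A sketch, assuming `instance` is a valid vk::Instance:

    // enhanced mode: one call, throws on error, returns the populated vector
    std::vector<vk::PhysicalDevice> devices = instance.enumeratePhysicalDevices();

    // pointer style: query the count, then fill; both Results are now nodiscard
    uint32_t count = 0;
    vk::Result result = instance.enumeratePhysicalDevices( &count, nullptr );
    std::vector<vk::PhysicalDevice> raw( count );
    result = instance.enumeratePhysicalDevices( &count, raw.data() );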
@@ -67624,29 +87538,28 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::acquireXlibDisplayEXT( Display* dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireXlibDisplayEXT( Display* dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkAcquireXlibDisplayEXT( m_physicalDevice, dpy, static_cast<VkDisplayKHR>( display ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<Display>::type PhysicalDevice::acquireXlibDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type PhysicalDevice::acquireXlibDisplayEXT( Display & dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d ) const
{
- Display dpy;
Result result = static_cast<Result>( d.vkAcquireXlibDisplayEXT( m_physicalDevice, &dpy, static_cast<VkDisplayKHR>( display ) ) );
- return createResultValue( result, dpy, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::acquireXlibDisplayEXT" );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::acquireXlibDisplayEXT" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
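Note the behavioural fix above: the enhanced-mode acquireXlibDisplayEXT previously declared a local Display and returned it, even though the X11 connection is an input, so it now takes the connection by reference and yields void on success. A sketch, assuming VK_USE_PLATFORM_XLIB_XRANDR_EXT is defined, VK_EXT_acquire_xlib_display is enabled, and `display` is a vk::DisplayKHR previously obtained from this physical device:

    Display * dpy = XOpenDisplay( nullptr );                // from <X11/Xlib.h>
    physicalDevice.acquireXlibDisplayEXT( *dpy, display );  // throws on failure, no return value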
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Device* pDevice, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Device* pDevice, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateDevice( m_physicalDevice, reinterpret_cast<const VkDeviceCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDevice*>( pDevice ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Device>::type PhysicalDevice::createDevice( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Device>::type PhysicalDevice::createDevice( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::Device device;
Result result = static_cast<Result>( d.vkCreateDevice( m_physicalDevice, reinterpret_cast<const VkDeviceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDevice*>( &device ) ) );
@@ -67654,7 +87567,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Device,Dispatch>>::type PhysicalDevice::createDeviceUnique( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Device,Dispatch>>::type PhysicalDevice::createDeviceUnique( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::Device device;
Result result = static_cast<Result>( d.vkCreateDevice( m_physicalDevice, reinterpret_cast<const VkDeviceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDevice*>( &device ) ) );
@@ -67666,13 +87579,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DisplayModeKHR* pMode, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DisplayModeKHR* pMode, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateDisplayModeKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayModeCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDisplayModeKHR*>( pMode ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayModeKHR>::type PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayModeCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayModeKHR>::type PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayModeCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::DisplayModeKHR mode;
Result result = static_cast<Result>( d.vkCreateDisplayModeKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayModeCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDisplayModeKHR*>( &mode ) ) );
@@ -67681,13 +87594,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties*>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName, Dispatch const &d ) const
{
std::vector<ExtensionProperties,Allocator> properties;
uint32_t propertyCount;
@@ -67708,8 +87621,8 @@ namespace VULKAN_HPP_NAMESPACE
}
return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateDeviceExtensionProperties" );
}
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName, Allocator const& vectorAllocator, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, ExtensionProperties>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName, Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<ExtensionProperties,Allocator> properties( vectorAllocator );
uint32_t propertyCount;
@@ -67733,13 +87646,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceLayerProperties( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceLayerProperties( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, pPropertyCount, reinterpret_cast<VkLayerProperties*>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<LayerProperties,Allocator>>::type PhysicalDevice::enumerateDeviceLayerProperties(Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<LayerProperties,Allocator>>::type PhysicalDevice::enumerateDeviceLayerProperties(Dispatch const &d ) const
{
std::vector<LayerProperties,Allocator> properties;
uint32_t propertyCount;
@@ -67760,8 +87673,8 @@ namespace VULKAN_HPP_NAMESPACE
}
return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateDeviceLayerProperties" );
}
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<LayerProperties,Allocator>>::type PhysicalDevice::enumerateDeviceLayerProperties(Allocator const& vectorAllocator, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, LayerProperties>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<LayerProperties,Allocator>>::type PhysicalDevice::enumerateDeviceLayerProperties(Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<LayerProperties,Allocator> properties( vectorAllocator );
uint32_t propertyCount;
@@ -67784,16 +87697,19 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, uint32_t* pCounterCount, VULKAN_HPP_NAMESPACE::PerformanceCounterKHR* pCounters, VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR* pCounterDescriptions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, uint32_t* pCounterCount, VULKAN_HPP_NAMESPACE::PerformanceCounterKHR* pCounters, VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR* pCounterDescriptions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, pCounterCount, reinterpret_cast<VkPerformanceCounterKHR*>( pCounters ), reinterpret_cast<VkPerformanceCounterDescriptionKHR*>( pCounterDescriptions ) ) );
+ return static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, pCounterCount, reinterpret_cast<VkPerformanceCounterKHR*>( pCounters ), reinterpret_cast<VkPerformanceCounterDescriptionKHR*>( pCounterDescriptions ) ) );
}
+
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<PerformanceCounterDescriptionKHR,Allocator>>::type PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, ArrayProxy<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR> counters, Dispatch const &d ) const
+ template <typename Allocator, typename Dispatch>
+ VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it.")
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PerformanceCounterDescriptionKHR,Allocator>>::type PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, ArrayProxy<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR> const &counters, Dispatch const &d ) const
{
- std::vector<PerformanceCounterDescriptionKHR,Allocator> counterDescriptions;
+ std::vector<PerformanceCounterDescriptionKHR,Allocator> counterDescriptions;
uint32_t counterCount;
Result result;
do
@@ -67811,11 +87727,14 @@ namespace VULKAN_HPP_NAMESPACE
counterDescriptions.resize( counterCount );
}
return createResultValue( result, counterDescriptions, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
+
}
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<PerformanceCounterDescriptionKHR,Allocator>>::type PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, ArrayProxy<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR> counters, Allocator const& vectorAllocator, Dispatch const &d ) const
+
+ template <typename Allocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, PerformanceCounterDescriptionKHR>::value, int>::type>
+ VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it.")
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PerformanceCounterDescriptionKHR,Allocator>>::type PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, ArrayProxy<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR> const &counters, Allocator const& vectorAllocator, Dispatch const &d ) const
{
- std::vector<PerformanceCounterDescriptionKHR,Allocator> counterDescriptions( vectorAllocator );
+ std::vector<PerformanceCounterDescriptionKHR,Allocator> counterDescriptions( vectorAllocator );
uint32_t counterCount;
Result result;
do
@@ -67833,17 +87752,68 @@ namespace VULKAN_HPP_NAMESPACE
counterDescriptions.resize( counterCount );
}
return createResultValue( result, counterDescriptions, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
+
+ }
+
+ template <typename PerformanceCounterKHRAllocator, typename PerformanceCounterDescriptionKHRAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator>, std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, Dispatch const & d ) const
+ {
+ std::pair<std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator>, std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>> enumeratedData;
+ uint32_t counterCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr ) );
+ if ( ( result == Result::eSuccess ) && counterCount )
+ {
+ enumeratedData.first.resize( counterCount );
+ enumeratedData.second.resize( counterCount );
+ result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, reinterpret_cast<VkPerformanceCounterKHR *>( enumeratedData.first.data() ), reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( enumeratedData.second.data() ) ) );
+ VULKAN_HPP_ASSERT( counterCount <= enumeratedData.first.size() );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( counterCount < enumeratedData.first.size() ) )
+ {
+ enumeratedData.first.resize( counterCount );
+ enumeratedData.second.resize( counterCount );
+ }
+ return createResultValue( result, enumeratedData, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
+ }
+
+ template <typename PerformanceCounterKHRAllocator, typename PerformanceCounterDescriptionKHRAllocator, typename Dispatch, typename B1, typename B2, typename std::enable_if < std::is_same<typename B1::value_type, PerformanceCounterKHR>::value && std::is_same<typename B2::value_type, PerformanceCounterDescriptionKHR>::value, int>::type >
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator>, std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, PerformanceCounterKHRAllocator & countersAllocator, PerformanceCounterDescriptionKHRAllocator & counterDescriptionsAllocator, Dispatch const & d ) const
+ {
+ std::pair<std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator>, std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>> enumeratedData( std::piecewise_construct, std::forward_as_tuple( countersAllocator ), std::forward_as_tuple( counterDescriptionsAllocator ) );
+ uint32_t counterCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr ) );
+ if ( ( result == Result::eSuccess ) && counterCount )
+ {
+ enumeratedData.first.resize( counterCount );
+ enumeratedData.second.resize( counterCount );
+ result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, reinterpret_cast<VkPerformanceCounterKHR *>( enumeratedData.first.data() ), reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( enumeratedData.second.data() ) ) );
+ VULKAN_HPP_ASSERT( counterCount <= enumeratedData.first.size() );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( counterCount < enumeratedData.first.size() ) )
+ {
+ enumeratedData.first.resize( counterCount );
+ enumeratedData.second.resize( counterCount );
+ }
+ return createResultValue( result, enumeratedData, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
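// ---------------------------------------------------------------------------
// Editorial illustration -- not part of this diff or of the generated header.
// A minimal sketch of the new pair-returning enhanced-mode overload added
// above. Assumptions: the default `vk` namespace, exceptions enabled (so the
// call returns the pair directly), VK_KHR_performance_query supported with
// its entry points loaded into the dispatcher, and `physicalDevice` /
// `queueFamilyIndex` provided by the application's own setup.
#include <iostream>
#include <vulkan/vulkan.hpp>

void listPerformanceCounters( vk::PhysicalDevice physicalDevice, uint32_t queueFamilyIndex )
{
  // .first holds the PerformanceCounterKHR records, .second the matching
  // PerformanceCounterDescriptionKHR records; the wrapper retries internally
  // while the result is eIncomplete.
  auto enumerated = physicalDevice.enumerateQueueFamilyPerformanceQueryCountersKHR( queueFamilyIndex );
  for ( size_t i = 0; i < enumerated.first.size(); ++i )
  {
    std::cout << "counter " << i << ": " << enumerated.second[i].name << "\n";
  }
}
// ---------------------------------------------------------------------------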
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), pPropertyCount, reinterpret_cast<VkDisplayModeProperties2KHR*>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayModeProperties2KHR,Allocator>>::type PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayModeProperties2KHR,Allocator>>::type PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d ) const
{
std::vector<DisplayModeProperties2KHR,Allocator> properties;
uint32_t propertyCount;
@@ -67864,8 +87834,8 @@ namespace VULKAN_HPP_NAMESPACE
}
return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayModeProperties2KHR" );
}
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayModeProperties2KHR,Allocator>>::type PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, DisplayModeProperties2KHR>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayModeProperties2KHR,Allocator>>::type PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<DisplayModeProperties2KHR,Allocator> properties( vectorAllocator );
uint32_t propertyCount;
@@ -67889,13 +87859,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), pPropertyCount, reinterpret_cast<VkDisplayModePropertiesKHR*>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayModePropertiesKHR,Allocator>>::type PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayModePropertiesKHR,Allocator>>::type PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d ) const
{
std::vector<DisplayModePropertiesKHR,Allocator> properties;
uint32_t propertyCount;
@@ -67916,8 +87886,8 @@ namespace VULKAN_HPP_NAMESPACE
}
return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayModePropertiesKHR" );
}
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayModePropertiesKHR,Allocator>>::type PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, DisplayModePropertiesKHR>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayModePropertiesKHR,Allocator>>::type PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<DisplayModePropertiesKHR,Allocator> properties( vectorAllocator );
uint32_t propertyCount;
@@ -67941,13 +87911,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR* pDisplayPlaneInfo, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR* pCapabilities, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR* pDisplayPlaneInfo, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR* pCapabilities, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkDisplayPlaneInfo2KHR*>( pDisplayPlaneInfo ), reinterpret_cast<VkDisplayPlaneCapabilities2KHR*>( pCapabilities ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR>::type PhysicalDevice::getDisplayPlaneCapabilities2KHR( const DisplayPlaneInfo2KHR & displayPlaneInfo, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR>::type PhysicalDevice::getDisplayPlaneCapabilities2KHR( const DisplayPlaneInfo2KHR & displayPlaneInfo, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR capabilities;
Result result = static_cast<Result>( d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkDisplayPlaneInfo2KHR*>( &displayPlaneInfo ), reinterpret_cast<VkDisplayPlaneCapabilities2KHR*>( &capabilities ) ) );
@@ -67956,13 +87926,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR* pCapabilities, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR* pCapabilities, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice, static_cast<VkDisplayModeKHR>( mode ), planeIndex, reinterpret_cast<VkDisplayPlaneCapabilitiesKHR*>( pCapabilities ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR>::type PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR>::type PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities;
Result result = static_cast<Result>( d.vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice, static_cast<VkDisplayModeKHR>( mode ), planeIndex, reinterpret_cast<VkDisplayPlaneCapabilitiesKHR*>( &capabilities ) ) );
@@ -67971,13 +87941,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, uint32_t* pDisplayCount, VULKAN_HPP_NAMESPACE::DisplayKHR* pDisplays, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, uint32_t* pDisplayCount, VULKAN_HPP_NAMESPACE::DisplayKHR* pDisplays, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, pDisplayCount, reinterpret_cast<VkDisplayKHR*>( pDisplays ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayKHR,Allocator>>::type PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayKHR,Allocator>>::type PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const &d ) const
{
std::vector<DisplayKHR,Allocator> displays;
uint32_t displayCount;
@@ -67998,8 +87968,8 @@ namespace VULKAN_HPP_NAMESPACE
}
return createResultValue( result, displays, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" );
}
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayKHR,Allocator>>::type PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Allocator const& vectorAllocator, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, DisplayKHR>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayKHR,Allocator>>::type PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<DisplayKHR,Allocator> displays( vectorAllocator );
uint32_t displayCount;
@@ -68023,13 +87993,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
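// ---------------------------------------------------------------------------
// Editorial illustration -- not part of this diff or of the generated header.
// The enhanced-mode overload above wraps the usual count/allocate/fetch loop
// (retrying while the result is eIncomplete) and returns the finished vector.
// Sketch under the same assumptions as the earlier example, plus an instance
// created with VK_KHR_display and a `planeIndex` below the reported
// display-plane count.
std::vector<vk::DisplayKHR> supportedDisplays( vk::PhysicalDevice physicalDevice, uint32_t planeIndex )
{
  // Throws a vk::SystemError on failure when exceptions are enabled.
  return physicalDevice.getDisplayPlaneSupportedDisplaysKHR( planeIndex );
}
// ---------------------------------------------------------------------------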
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getCalibrateableTimeDomainsEXT( uint32_t* pTimeDomainCount, VULKAN_HPP_NAMESPACE::TimeDomainEXT* pTimeDomains, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCalibrateableTimeDomainsEXT( uint32_t* pTimeDomainCount, VULKAN_HPP_NAMESPACE::TimeDomainEXT* pTimeDomains, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, pTimeDomainCount, reinterpret_cast<VkTimeDomainEXT*>( pTimeDomains ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<TimeDomainEXT,Allocator>>::type PhysicalDevice::getCalibrateableTimeDomainsEXT(Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<TimeDomainEXT,Allocator>>::type PhysicalDevice::getCalibrateableTimeDomainsEXT(Dispatch const &d ) const
{
std::vector<TimeDomainEXT,Allocator> timeDomains;
uint32_t timeDomainCount;
@@ -68050,8 +88020,8 @@ namespace VULKAN_HPP_NAMESPACE
}
return createResultValue( result, timeDomains, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getCalibrateableTimeDomainsEXT" );
}
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<TimeDomainEXT,Allocator>>::type PhysicalDevice::getCalibrateableTimeDomainsEXT(Allocator const& vectorAllocator, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, TimeDomainEXT>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<TimeDomainEXT,Allocator>>::type PhysicalDevice::getCalibrateableTimeDomainsEXT(Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<TimeDomainEXT,Allocator> timeDomains( vectorAllocator );
uint32_t timeDomainCount;
@@ -68075,13 +88045,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getCooperativeMatrixPropertiesNV( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCooperativeMatrixPropertiesNV( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, pPropertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV*>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<CooperativeMatrixPropertiesNV,Allocator>>::type PhysicalDevice::getCooperativeMatrixPropertiesNV(Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<CooperativeMatrixPropertiesNV,Allocator>>::type PhysicalDevice::getCooperativeMatrixPropertiesNV(Dispatch const &d ) const
{
std::vector<CooperativeMatrixPropertiesNV,Allocator> properties;
uint32_t propertyCount;
@@ -68102,8 +88072,8 @@ namespace VULKAN_HPP_NAMESPACE
}
return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getCooperativeMatrixPropertiesNV" );
}
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<CooperativeMatrixPropertiesNV,Allocator>>::type PhysicalDevice::getCooperativeMatrixPropertiesNV(Allocator const& vectorAllocator, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, CooperativeMatrixPropertiesNV>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<CooperativeMatrixPropertiesNV,Allocator>>::type PhysicalDevice::getCooperativeMatrixPropertiesNV(Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<CooperativeMatrixPropertiesNV,Allocator> properties( vectorAllocator );
uint32_t propertyCount;
@@ -68126,14 +88096,29 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifdef VK_USE_PLATFORM_DIRECTFB_EXT
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Bool32 PhysicalDevice::getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, IDirectFB* dfb, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ return static_cast<Bool32>( d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, dfb ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Bool32 PhysicalDevice::getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, IDirectFB & dfb, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ return d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, &dfb );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneProperties2KHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneProperties2KHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR*>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayPlaneProperties2KHR,Allocator>>::type PhysicalDevice::getDisplayPlaneProperties2KHR(Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayPlaneProperties2KHR,Allocator>>::type PhysicalDevice::getDisplayPlaneProperties2KHR(Dispatch const &d ) const
{
std::vector<DisplayPlaneProperties2KHR,Allocator> properties;
uint32_t propertyCount;
@@ -68154,8 +88139,8 @@ namespace VULKAN_HPP_NAMESPACE
}
return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneProperties2KHR" );
}
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayPlaneProperties2KHR,Allocator>>::type PhysicalDevice::getDisplayPlaneProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, DisplayPlaneProperties2KHR>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayPlaneProperties2KHR,Allocator>>::type PhysicalDevice::getDisplayPlaneProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<DisplayPlaneProperties2KHR,Allocator> properties( vectorAllocator );
uint32_t propertyCount;
@@ -68179,13 +88164,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlanePropertiesKHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlanePropertiesKHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR*>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayPlanePropertiesKHR,Allocator>>::type PhysicalDevice::getDisplayPlanePropertiesKHR(Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayPlanePropertiesKHR,Allocator>>::type PhysicalDevice::getDisplayPlanePropertiesKHR(Dispatch const &d ) const
{
std::vector<DisplayPlanePropertiesKHR,Allocator> properties;
uint32_t propertyCount;
@@ -68206,8 +88191,8 @@ namespace VULKAN_HPP_NAMESPACE
}
return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlanePropertiesKHR" );
}
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayPlanePropertiesKHR,Allocator>>::type PhysicalDevice::getDisplayPlanePropertiesKHR(Allocator const& vectorAllocator, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, DisplayPlanePropertiesKHR>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayPlanePropertiesKHR,Allocator>>::type PhysicalDevice::getDisplayPlanePropertiesKHR(Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<DisplayPlanePropertiesKHR,Allocator> properties( vectorAllocator );
uint32_t propertyCount;
@@ -68231,13 +88216,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayProperties2KHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayProperties2KHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayProperties2KHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayProperties2KHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayProperties2KHR*>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayProperties2KHR,Allocator>>::type PhysicalDevice::getDisplayProperties2KHR(Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayProperties2KHR,Allocator>>::type PhysicalDevice::getDisplayProperties2KHR(Dispatch const &d ) const
{
std::vector<DisplayProperties2KHR,Allocator> properties;
uint32_t propertyCount;
@@ -68258,8 +88243,8 @@ namespace VULKAN_HPP_NAMESPACE
}
return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayProperties2KHR" );
}
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayProperties2KHR,Allocator>>::type PhysicalDevice::getDisplayProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, DisplayProperties2KHR>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayProperties2KHR,Allocator>>::type PhysicalDevice::getDisplayProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<DisplayProperties2KHR,Allocator> properties( vectorAllocator );
uint32_t propertyCount;
@@ -68283,13 +88268,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPropertiesKHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPropertiesKHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPropertiesKHR*>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayPropertiesKHR,Allocator>>::type PhysicalDevice::getDisplayPropertiesKHR(Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayPropertiesKHR,Allocator>>::type PhysicalDevice::getDisplayPropertiesKHR(Dispatch const &d ) const
{
std::vector<DisplayPropertiesKHR,Allocator> properties;
uint32_t propertyCount;
@@ -68310,8 +88295,8 @@ namespace VULKAN_HPP_NAMESPACE
}
return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPropertiesKHR" );
}
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayPropertiesKHR,Allocator>>::type PhysicalDevice::getDisplayPropertiesKHR(Allocator const& vectorAllocator, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, DisplayPropertiesKHR>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayPropertiesKHR,Allocator>>::type PhysicalDevice::getDisplayPropertiesKHR(Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<DisplayPropertiesKHR,Allocator> properties( vectorAllocator );
uint32_t propertyCount;
@@ -68395,13 +88380,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV* pExternalImageFormatProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV* pExternalImageFormatProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ), reinterpret_cast<VkExternalImageFormatPropertiesNV*>( pExternalImageFormatProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV>::type PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV>::type PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV externalImageFormatProperties;
Result result = static_cast<Result>( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ), reinterpret_cast<VkExternalImageFormatPropertiesNV*>( &externalImageFormatProperties ) ) );
@@ -68562,28 +88547,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getGeneratedCommandsPropertiesNVX( VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsFeaturesNVX* pFeatures, VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsLimitsNVX* pLimits, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
- {
- d.vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( m_physicalDevice, reinterpret_cast<VkDeviceGeneratedCommandsFeaturesNVX*>( pFeatures ), reinterpret_cast<VkDeviceGeneratedCommandsLimitsNVX*>( pLimits ) );
- }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Dispatch>
- VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsLimitsNVX PhysicalDevice::getGeneratedCommandsPropertiesNVX( DeviceGeneratedCommandsFeaturesNVX & features, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
- {
- VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsLimitsNVX limits;
- d.vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( m_physicalDevice, reinterpret_cast<VkDeviceGeneratedCommandsFeaturesNVX*>( &features ), reinterpret_cast<VkDeviceGeneratedCommandsLimitsNVX*>( &limits ) );
- return limits;
- }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ImageFormatProperties* pImageFormatProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ImageFormatProperties* pImageFormatProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), reinterpret_cast<VkImageFormatProperties*>( pImageFormatProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties>::type PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties>::type PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties;
Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), reinterpret_cast<VkImageFormatProperties*>( &imageFormatProperties ) ) );
@@ -68592,20 +88562,20 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2* pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2* pImageFormatProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2* pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2* pImageFormatProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( pImageFormatInfo ), reinterpret_cast<VkImageFormatProperties2*>( pImageFormatProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties;
Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2*>( &imageFormatProperties ) ) );
return createResultValue( result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties2" );
}
template<typename X, typename Y, typename ...Z, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const
{
StructureChain<X, Y, Z...> structureChain;
VULKAN_HPP_NAMESPACE::ImageFormatProperties2& imageFormatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
@@ -68615,20 +88585,20 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
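// ---------------------------------------------------------------------------
// Editorial illustration -- not part of this diff or of the generated header.
// Sketch of the StructureChain overload above, under the same assumptions as
// the earlier examples and a Vulkan 1.1+ instance. The chained struct is just
// one example of a structure extending ImageFormatProperties2; an unsupported
// format/usage combination makes the call throw a vk::SystemError when
// exceptions are enabled.
vk::ImageFormatProperties querySampledImageLimits( vk::PhysicalDevice physicalDevice )
{
  vk::PhysicalDeviceImageFormatInfo2 info( vk::Format::eR8G8B8A8Unorm,
                                           vk::ImageType::e2D,
                                           vk::ImageTiling::eOptimal,
                                           vk::ImageUsageFlagBits::eSampled );
  auto chain = physicalDevice.getImageFormatProperties2< vk::ImageFormatProperties2,
                                                         vk::SamplerYcbcrConversionImageFormatProperties >( info );
  // Copy out the plain properties struct; it carries no pNext pointer.
  return chain.get<vk::ImageFormatProperties2>().imageFormatProperties;
}
// ---------------------------------------------------------------------------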
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2* pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2* pImageFormatProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2* pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2* pImageFormatProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( pImageFormatInfo ), reinterpret_cast<VkImageFormatProperties2*>( pImageFormatProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties;
Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2*>( &imageFormatProperties ) ) );
return createResultValue( result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties2KHR" );
}
template<typename X, typename Y, typename ...Z, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const
{
StructureChain<X, Y, Z...> structureChain;
VULKAN_HPP_NAMESPACE::ImageFormatProperties2& imageFormatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
@@ -68714,13 +88684,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pRectCount, VULKAN_HPP_NAMESPACE::Rect2D* pRects, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pRectCount, VULKAN_HPP_NAMESPACE::Rect2D* pRects, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pRectCount, reinterpret_cast<VkRect2D*>( pRects ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<Rect2D,Allocator>>::type PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<Rect2D,Allocator>>::type PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const
{
std::vector<Rect2D,Allocator> rects;
uint32_t rectCount;
@@ -68741,8 +88711,8 @@ namespace VULKAN_HPP_NAMESPACE
}
return createResultValue( result, rects, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getPresentRectanglesKHR" );
}
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<Rect2D,Allocator>>::type PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, Rect2D>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<Rect2D,Allocator>>::type PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<Rect2D,Allocator> rects( vectorAllocator );
uint32_t rectCount;
@@ -68847,7 +88817,7 @@ namespace VULKAN_HPP_NAMESPACE
d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties*>( pQueueFamilyProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator, typename Dispatch>
+ template<typename Allocator , typename Dispatch>
VULKAN_HPP_INLINE std::vector<QueueFamilyProperties,Allocator> PhysicalDevice::getQueueFamilyProperties(Dispatch const &d ) const
{
std::vector<QueueFamilyProperties,Allocator> queueFamilyProperties;
@@ -68857,7 +88827,7 @@ namespace VULKAN_HPP_NAMESPACE
d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties*>( queueFamilyProperties.data() ) );
return queueFamilyProperties;
}
- template<typename Allocator, typename Dispatch>
+ template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, QueueFamilyProperties>::value, int>::type>
VULKAN_HPP_INLINE std::vector<QueueFamilyProperties,Allocator> PhysicalDevice::getQueueFamilyProperties(Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<QueueFamilyProperties,Allocator> queueFamilyProperties( vectorAllocator );
@@ -68875,7 +88845,7 @@ namespace VULKAN_HPP_NAMESPACE
d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( pQueueFamilyProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator, typename Dispatch>
+ template<typename Allocator , typename Dispatch>
VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2,Allocator> PhysicalDevice::getQueueFamilyProperties2(Dispatch const &d ) const
{
std::vector<QueueFamilyProperties2,Allocator> queueFamilyProperties;
@@ -68885,7 +88855,7 @@ namespace VULKAN_HPP_NAMESPACE
d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( queueFamilyProperties.data() ) );
return queueFamilyProperties;
}
- template<typename Allocator, typename Dispatch>
+ template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, QueueFamilyProperties2>::value, int>::type>
VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2,Allocator> PhysicalDevice::getQueueFamilyProperties2(Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<QueueFamilyProperties2,Allocator> queueFamilyProperties( vectorAllocator );
@@ -68895,7 +88865,7 @@ namespace VULKAN_HPP_NAMESPACE
d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( queueFamilyProperties.data() ) );
return queueFamilyProperties;
}
- template<typename StructureChain, typename Allocator, typename Dispatch>
+ template<typename StructureChain, typename Allocator , typename Dispatch>
VULKAN_HPP_INLINE std::vector<StructureChain,Allocator> PhysicalDevice::getQueueFamilyProperties2(Dispatch const &d ) const
{
std::vector<StructureChain,Allocator> queueFamilyProperties;
@@ -68914,7 +88884,7 @@ namespace VULKAN_HPP_NAMESPACE
}
return queueFamilyProperties;
}
- template<typename StructureChain, typename Allocator, typename Dispatch>
+ template<typename StructureChain, typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, StructureChain>::value, int>::type>
VULKAN_HPP_INLINE std::vector<StructureChain,Allocator> PhysicalDevice::getQueueFamilyProperties2(Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<StructureChain,Allocator> queueFamilyProperties( vectorAllocator );
@@ -68941,7 +88911,7 @@ namespace VULKAN_HPP_NAMESPACE
d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( pQueueFamilyProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator, typename Dispatch>
+ template<typename Allocator , typename Dispatch>
VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2,Allocator> PhysicalDevice::getQueueFamilyProperties2KHR(Dispatch const &d ) const
{
std::vector<QueueFamilyProperties2,Allocator> queueFamilyProperties;
@@ -68951,7 +88921,7 @@ namespace VULKAN_HPP_NAMESPACE
d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( queueFamilyProperties.data() ) );
return queueFamilyProperties;
}
- template<typename Allocator, typename Dispatch>
+ template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, QueueFamilyProperties2>::value, int>::type>
VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2,Allocator> PhysicalDevice::getQueueFamilyProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<QueueFamilyProperties2,Allocator> queueFamilyProperties( vectorAllocator );
@@ -68961,7 +88931,7 @@ namespace VULKAN_HPP_NAMESPACE
d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( queueFamilyProperties.data() ) );
return queueFamilyProperties;
}
- template<typename StructureChain, typename Allocator, typename Dispatch>
+ template<typename StructureChain, typename Allocator , typename Dispatch>
VULKAN_HPP_INLINE std::vector<StructureChain,Allocator> PhysicalDevice::getQueueFamilyProperties2KHR(Dispatch const &d ) const
{
std::vector<StructureChain,Allocator> queueFamilyProperties;
@@ -68980,7 +88950,7 @@ namespace VULKAN_HPP_NAMESPACE
}
return queueFamilyProperties;
}
- template<typename StructureChain, typename Allocator, typename Dispatch>
+ template<typename StructureChain, typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, StructureChain>::value, int>::type>
VULKAN_HPP_INLINE std::vector<StructureChain,Allocator> PhysicalDevice::getQueueFamilyProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<StructureChain,Allocator> queueFamilyProperties( vectorAllocator );
@@ -69007,7 +88977,7 @@ namespace VULKAN_HPP_NAMESPACE
d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), pPropertyCount, reinterpret_cast<VkSparseImageFormatProperties*>( pProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator, typename Dispatch>
+ template<typename Allocator , typename Dispatch>
VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties,Allocator> PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, Dispatch const &d ) const
{
std::vector<SparseImageFormatProperties,Allocator> properties;
@@ -69017,7 +88987,7 @@ namespace VULKAN_HPP_NAMESPACE
d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties*>( properties.data() ) );
return properties;
}
- template<typename Allocator, typename Dispatch>
+ template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, SparseImageFormatProperties>::value, int>::type>
VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties,Allocator> PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<SparseImageFormatProperties,Allocator> properties( vectorAllocator );
@@ -69035,7 +89005,7 @@ namespace VULKAN_HPP_NAMESPACE
d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( pFormatInfo ), pPropertyCount, reinterpret_cast<VkSparseImageFormatProperties2*>( pProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator, typename Dispatch>
+ template<typename Allocator , typename Dispatch>
VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2,Allocator> PhysicalDevice::getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const &d ) const
{
std::vector<SparseImageFormatProperties2,Allocator> properties;
@@ -69045,7 +89015,7 @@ namespace VULKAN_HPP_NAMESPACE
d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( &formatInfo ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties2*>( properties.data() ) );
return properties;
}
- template<typename Allocator, typename Dispatch>
+ template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, SparseImageFormatProperties2>::value, int>::type>
VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2,Allocator> PhysicalDevice::getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<SparseImageFormatProperties2,Allocator> properties( vectorAllocator );
@@ -69063,7 +89033,7 @@ namespace VULKAN_HPP_NAMESPACE
d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( pFormatInfo ), pPropertyCount, reinterpret_cast<VkSparseImageFormatProperties2*>( pProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator, typename Dispatch>
+ template<typename Allocator , typename Dispatch>
VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2,Allocator> PhysicalDevice::getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const &d ) const
{
std::vector<SparseImageFormatProperties2,Allocator> properties;
@@ -69073,7 +89043,7 @@ namespace VULKAN_HPP_NAMESPACE
d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( &formatInfo ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties2*>( properties.data() ) );
return properties;
}
- template<typename Allocator, typename Dispatch>
+ template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, SparseImageFormatProperties2>::value, int>::type>
VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2,Allocator> PhysicalDevice::getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<SparseImageFormatProperties2,Allocator> properties( vectorAllocator );
@@ -69086,13 +89056,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( uint32_t* pCombinationCount, VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV* pCombinations, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( uint32_t* pCombinationCount, VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV* pCombinations, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, pCombinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV*>( pCombinations ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<FramebufferMixedSamplesCombinationNV,Allocator>>::type PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV(Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<FramebufferMixedSamplesCombinationNV,Allocator>>::type PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV(Dispatch const &d ) const
{
std::vector<FramebufferMixedSamplesCombinationNV,Allocator> combinations;
uint32_t combinationCount;
@@ -69113,8 +89083,8 @@ namespace VULKAN_HPP_NAMESPACE
}
return createResultValue( result, combinations, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" );
}
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<FramebufferMixedSamplesCombinationNV,Allocator>>::type PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV(Allocator const& vectorAllocator, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, FramebufferMixedSamplesCombinationNV>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<FramebufferMixedSamplesCombinationNV,Allocator>>::type PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV(Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<FramebufferMixedSamplesCombinationNV,Allocator> combinations( vectorAllocator );
uint32_t combinationCount;
@@ -69138,13 +89108,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT* pSurfaceCapabilities, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT* pSurfaceCapabilities, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilities2EXT*>( pSurfaceCapabilities ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT>::type PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT>::type PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT surfaceCapabilities;
Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilities2EXT*>( &surfaceCapabilities ) ) );
@@ -69153,20 +89123,20 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR* pSurfaceCapabilities, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR* pSurfaceCapabilities, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( pSurfaceInfo ), reinterpret_cast<VkSurfaceCapabilities2KHR*>( pSurfaceCapabilities ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>::type PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>::type PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR surfaceCapabilities;
Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( &surfaceInfo ), reinterpret_cast<VkSurfaceCapabilities2KHR*>( &surfaceCapabilities ) ) );
return createResultValue( result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceCapabilities2KHR" );
}
template<typename X, typename Y, typename ...Z, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const
{
StructureChain<X, Y, Z...> structureChain;
VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR& surfaceCapabilities = structureChain.template get<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>();
@@ -69176,13 +89146,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR* pSurfaceCapabilities, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR* pSurfaceCapabilities, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilitiesKHR*>( pSurfaceCapabilities ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR>::type PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR>::type PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities;
Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilitiesKHR*>( &surfaceCapabilities ) ) );
@@ -69191,13 +89161,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR* pSurfaceFormats, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR* pSurfaceFormats, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( pSurfaceInfo ), pSurfaceFormatCount, reinterpret_cast<VkSurfaceFormat2KHR*>( pSurfaceFormats ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<SurfaceFormat2KHR,Allocator>>::type PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<SurfaceFormat2KHR,Allocator>>::type PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const
{
std::vector<SurfaceFormat2KHR,Allocator> surfaceFormats;
uint32_t surfaceFormatCount;
@@ -69218,8 +89188,8 @@ namespace VULKAN_HPP_NAMESPACE
}
return createResultValue( result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceFormats2KHR" );
}
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<SurfaceFormat2KHR,Allocator>>::type PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, SurfaceFormat2KHR>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<SurfaceFormat2KHR,Allocator>>::type PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<SurfaceFormat2KHR,Allocator> surfaceFormats( vectorAllocator );
uint32_t surfaceFormatCount;
@@ -69243,13 +89213,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormatKHR* pSurfaceFormats, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormatKHR* pSurfaceFormats, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pSurfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR*>( pSurfaceFormats ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<SurfaceFormatKHR,Allocator>>::type PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<SurfaceFormatKHR,Allocator>>::type PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const
{
std::vector<SurfaceFormatKHR,Allocator> surfaceFormats;
uint32_t surfaceFormatCount;
@@ -69270,8 +89240,8 @@ namespace VULKAN_HPP_NAMESPACE
}
return createResultValue( result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceFormatsKHR" );
}
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<SurfaceFormatKHR,Allocator>>::type PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, SurfaceFormatKHR>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<SurfaceFormatKHR,Allocator>>::type PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<SurfaceFormatKHR,Allocator> surfaceFormats( vectorAllocator );
uint32_t surfaceFormatCount;
@@ -69296,13 +89266,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_WIN32_KHR
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR* pPresentModes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR* pPresentModes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( pSurfaceInfo ), pPresentModeCount, reinterpret_cast<VkPresentModeKHR*>( pPresentModes ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type PhysicalDevice::getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type PhysicalDevice::getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const
{
std::vector<PresentModeKHR,Allocator> presentModes;
uint32_t presentModeCount;
@@ -69323,8 +89293,8 @@ namespace VULKAN_HPP_NAMESPACE
}
return createResultValue( result, presentModes, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfacePresentModes2EXT" );
}
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type PhysicalDevice::getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, PresentModeKHR>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type PhysicalDevice::getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<PresentModeKHR,Allocator> presentModes( vectorAllocator );
uint32_t presentModeCount;
@@ -69349,13 +89319,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR* pPresentModes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR* pPresentModes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pPresentModeCount, reinterpret_cast<VkPresentModeKHR*>( pPresentModes ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const
{
std::vector<PresentModeKHR,Allocator> presentModes;
uint32_t presentModeCount;
@@ -69376,8 +89346,8 @@ namespace VULKAN_HPP_NAMESPACE
}
return createResultValue( result, presentModes, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfacePresentModesKHR" );
}
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, PresentModeKHR>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<PresentModeKHR,Allocator> presentModes( vectorAllocator );
uint32_t presentModeCount;
@@ -69401,13 +89371,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::Bool32* pSupported, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::Bool32* pSupported, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkBool32*>( pSupported ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Bool32>::type PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Bool32>::type PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const
{
VULKAN_HPP_NAMESPACE::Bool32 supported;
Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkBool32*>( &supported ) ) );
@@ -69416,13 +89386,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getToolPropertiesEXT( uint32_t* pToolCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT* pToolProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getToolPropertiesEXT( uint32_t* pToolCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT* pToolProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, pToolCount, reinterpret_cast<VkPhysicalDeviceToolPropertiesEXT*>( pToolProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceToolPropertiesEXT,Allocator>>::type PhysicalDevice::getToolPropertiesEXT(Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceToolPropertiesEXT,Allocator>>::type PhysicalDevice::getToolPropertiesEXT(Dispatch const &d ) const
{
std::vector<PhysicalDeviceToolPropertiesEXT,Allocator> toolProperties;
uint32_t toolCount;
@@ -69443,8 +89413,8 @@ namespace VULKAN_HPP_NAMESPACE
}
return createResultValue( result, toolProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getToolPropertiesEXT" );
}
- template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceToolPropertiesEXT,Allocator>>::type PhysicalDevice::getToolPropertiesEXT(Allocator const& vectorAllocator, Dispatch const &d ) const
+ template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceToolPropertiesEXT>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceToolPropertiesEXT,Allocator>>::type PhysicalDevice::getToolPropertiesEXT(Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<PhysicalDeviceToolPropertiesEXT,Allocator> toolProperties( vectorAllocator );
uint32_t toolCount;
@@ -69566,7 +89536,7 @@ namespace VULKAN_HPP_NAMESPACE
d.vkGetQueueCheckpointDataNV( m_queue, pCheckpointDataCount, reinterpret_cast<VkCheckpointDataNV*>( pCheckpointData ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template<typename Allocator, typename Dispatch>
+ template<typename Allocator , typename Dispatch>
VULKAN_HPP_INLINE std::vector<CheckpointDataNV,Allocator> Queue::getCheckpointDataNV(Dispatch const &d ) const
{
std::vector<CheckpointDataNV,Allocator> checkpointData;
@@ -69576,7 +89546,7 @@ namespace VULKAN_HPP_NAMESPACE
d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV*>( checkpointData.data() ) );
return checkpointData;
}
- template<typename Allocator, typename Dispatch>
+ template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, CheckpointDataNV>::value, int>::type>
VULKAN_HPP_INLINE std::vector<CheckpointDataNV,Allocator> Queue::getCheckpointDataNV(Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<CheckpointDataNV,Allocator> checkpointData( vectorAllocator );
@@ -69602,13 +89572,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Queue::bindSparse( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindSparseInfo* pBindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::bindSparse( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindSparseInfo* pBindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkQueueBindSparse( m_queue, bindInfoCount, reinterpret_cast<const VkBindSparseInfo*>( pBindInfo ), static_cast<VkFence>( fence ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::bindSparse( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> bindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::bindSparse( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> const &bindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkQueueBindSparse( m_queue, bindInfo.size() , reinterpret_cast<const VkBindSparseInfo*>( bindInfo.data() ), static_cast<VkFence>( fence ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::bindSparse" );
@@ -69643,13 +89613,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR* pPresentInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR* pPresentInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR*>( pPresentInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Queue::presentKHR( const PresentInfoKHR & presentInfo, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::presentKHR( const PresentInfoKHR & presentInfo, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR*>( &presentInfo ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::presentKHR", { Result::eSuccess, Result::eSuboptimalKHR } );
@@ -69658,13 +89628,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::setPerformanceConfigurationINTEL" );
@@ -69672,13 +89642,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Queue::submit( uint32_t submitCount, const VULKAN_HPP_NAMESPACE::SubmitInfo* pSubmits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit( uint32_t submitCount, const VULKAN_HPP_NAMESPACE::SubmitInfo* pSubmits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkQueueSubmit( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo*>( pSubmits ), static_cast<VkFence>( fence ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::submit( ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::submit( ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> const &submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkQueueSubmit( m_queue, submits.size() , reinterpret_cast<const VkSubmitInfo*>( submits.data() ), static_cast<VkFence>( fence ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::submit" );
@@ -69687,364 +89657,437 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Queue::waitIdle(Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::waitIdle(Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkQueueWaitIdle( m_queue ) );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::waitIdle(Dispatch const &d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::waitIdle(Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkQueueWaitIdle( m_queue ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::waitIdle" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
#ifdef VK_USE_PLATFORM_ANDROID_KHR
- template <> struct isStructureChainValid<AndroidHardwareBufferPropertiesANDROID, AndroidHardwareBufferFormatPropertiesANDROID>{ enum { value = true }; };
+ template <> struct StructExtends<AndroidHardwareBufferFormatPropertiesANDROID, AndroidHardwareBufferPropertiesANDROID>{ enum { value = true }; };
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
#ifdef VK_USE_PLATFORM_ANDROID_KHR
- template <> struct isStructureChainValid<ImageFormatProperties2, AndroidHardwareBufferUsageANDROID>{ enum { value = true }; };
+ template <> struct StructExtends<AndroidHardwareBufferUsageANDROID, ImageFormatProperties2>{ enum { value = true }; };
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
- template <> struct isStructureChainValid<AttachmentDescription2, AttachmentDescriptionStencilLayout>{ enum { value = true }; };
- template <> struct isStructureChainValid<AttachmentReference2, AttachmentReferenceStencilLayout>{ enum { value = true }; };
- template <> struct isStructureChainValid<BindBufferMemoryInfo, BindBufferMemoryDeviceGroupInfo>{ enum { value = true }; };
- template <> struct isStructureChainValid<BindImageMemoryInfo, BindImageMemoryDeviceGroupInfo>{ enum { value = true }; };
- template <> struct isStructureChainValid<BindImageMemoryInfo, BindImageMemorySwapchainInfoKHR>{ enum { value = true }; };
- template <> struct isStructureChainValid<BindImageMemoryInfo, BindImagePlaneMemoryInfo>{ enum { value = true }; };
- template <> struct isStructureChainValid<BufferCreateInfo, BufferDeviceAddressCreateInfoEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<BufferCreateInfo, BufferOpaqueCaptureAddressCreateInfo>{ enum { value = true }; };
- template <> struct isStructureChainValid<CommandBufferInheritanceInfo, CommandBufferInheritanceConditionalRenderingInfoEXT>{ enum { value = true }; };
+ template <> struct StructExtends<AttachmentDescriptionStencilLayout, AttachmentDescription2>{ enum { value = true }; };
+ template <> struct StructExtends<AttachmentReferenceStencilLayout, AttachmentReference2>{ enum { value = true }; };
+ template <> struct StructExtends<BindBufferMemoryDeviceGroupInfo, BindBufferMemoryInfo>{ enum { value = true }; };
+ template <> struct StructExtends<BindImageMemoryDeviceGroupInfo, BindImageMemoryInfo>{ enum { value = true }; };
+ template <> struct StructExtends<BindImageMemorySwapchainInfoKHR, BindImageMemoryInfo>{ enum { value = true }; };
+ template <> struct StructExtends<BindImagePlaneMemoryInfo, BindImageMemoryInfo>{ enum { value = true }; };
+ template <> struct StructExtends<BufferDeviceAddressCreateInfoEXT, BufferCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<BufferOpaqueCaptureAddressCreateInfo, BufferCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<CommandBufferInheritanceConditionalRenderingInfoEXT, CommandBufferInheritanceInfo>{ enum { value = true }; };
+ template <> struct StructExtends<CommandBufferInheritanceRenderPassTransformInfoQCOM, CommandBufferInheritanceInfo>{ enum { value = true }; };
#ifdef VK_USE_PLATFORM_WIN32_KHR
- template <> struct isStructureChainValid<SubmitInfo, D3D12FenceSubmitInfoKHR>{ enum { value = true }; };
+ template <> struct StructExtends<D3D12FenceSubmitInfoKHR, SubmitInfo>{ enum { value = true }; };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- template <> struct isStructureChainValid<InstanceCreateInfo, DebugReportCallbackCreateInfoEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<InstanceCreateInfo, DebugUtilsMessengerCreateInfoEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<BufferCreateInfo, DedicatedAllocationBufferCreateInfoNV>{ enum { value = true }; };
- template <> struct isStructureChainValid<ImageCreateInfo, DedicatedAllocationImageCreateInfoNV>{ enum { value = true }; };
- template <> struct isStructureChainValid<MemoryAllocateInfo, DedicatedAllocationMemoryAllocateInfoNV>{ enum { value = true }; };
- template <> struct isStructureChainValid<DescriptorPoolCreateInfo, DescriptorPoolInlineUniformBlockCreateInfoEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<DescriptorSetLayoutCreateInfo, DescriptorSetLayoutBindingFlagsCreateInfo>{ enum { value = true }; };
- template <> struct isStructureChainValid<DescriptorSetAllocateInfo, DescriptorSetVariableDescriptorCountAllocateInfo>{ enum { value = true }; };
- template <> struct isStructureChainValid<DescriptorSetLayoutSupport, DescriptorSetVariableDescriptorCountLayoutSupport>{ enum { value = true }; };
- template <> struct isStructureChainValid<BindSparseInfo, DeviceGroupBindSparseInfo>{ enum { value = true }; };
- template <> struct isStructureChainValid<CommandBufferBeginInfo, DeviceGroupCommandBufferBeginInfo>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, DeviceGroupDeviceCreateInfo>{ enum { value = true }; };
- template <> struct isStructureChainValid<PresentInfoKHR, DeviceGroupPresentInfoKHR>{ enum { value = true }; };
- template <> struct isStructureChainValid<RenderPassBeginInfo, DeviceGroupRenderPassBeginInfo>{ enum { value = true }; };
- template <> struct isStructureChainValid<SubmitInfo, DeviceGroupSubmitInfo>{ enum { value = true }; };
- template <> struct isStructureChainValid<SwapchainCreateInfoKHR, DeviceGroupSwapchainCreateInfoKHR>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, DeviceMemoryOverallocationCreateInfoAMD>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceQueueCreateInfo, DeviceQueueGlobalPriorityCreateInfoEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<SurfaceCapabilities2KHR, DisplayNativeHdrSurfaceCapabilitiesAMD>{ enum { value = true }; };
- template <> struct isStructureChainValid<PresentInfoKHR, DisplayPresentInfoKHR>{ enum { value = true }; };
- template <> struct isStructureChainValid<FormatProperties2, DrmFormatModifierPropertiesListEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<FenceCreateInfo, ExportFenceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<DebugReportCallbackCreateInfoEXT, InstanceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<DebugUtilsMessengerCreateInfoEXT, InstanceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<DedicatedAllocationBufferCreateInfoNV, BufferCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<DedicatedAllocationImageCreateInfoNV, ImageCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<DedicatedAllocationMemoryAllocateInfoNV, MemoryAllocateInfo>{ enum { value = true }; };
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template <> struct StructExtends<DeferredOperationInfoKHR, RayTracingPipelineCreateInfoKHR>{ enum { value = true }; };
+ template <> struct StructExtends<DeferredOperationInfoKHR, AccelerationStructureBuildGeometryInfoKHR>{ enum { value = true }; };
+ template <> struct StructExtends<DeferredOperationInfoKHR, CopyAccelerationStructureInfoKHR>{ enum { value = true }; };
+ template <> struct StructExtends<DeferredOperationInfoKHR, CopyMemoryToAccelerationStructureInfoKHR>{ enum { value = true }; };
+ template <> struct StructExtends<DeferredOperationInfoKHR, CopyAccelerationStructureToMemoryInfoKHR>{ enum { value = true }; };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ template <> struct StructExtends<DescriptorPoolInlineUniformBlockCreateInfoEXT, DescriptorPoolCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<DescriptorSetLayoutBindingFlagsCreateInfo, DescriptorSetLayoutCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<DescriptorSetVariableDescriptorCountAllocateInfo, DescriptorSetAllocateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<DescriptorSetVariableDescriptorCountLayoutSupport, DescriptorSetLayoutSupport>{ enum { value = true }; };
+ template <> struct StructExtends<DeviceDiagnosticsConfigCreateInfoNV, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<DeviceGroupBindSparseInfo, BindSparseInfo>{ enum { value = true }; };
+ template <> struct StructExtends<DeviceGroupCommandBufferBeginInfo, CommandBufferBeginInfo>{ enum { value = true }; };
+ template <> struct StructExtends<DeviceGroupDeviceCreateInfo, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<DeviceGroupPresentInfoKHR, PresentInfoKHR>{ enum { value = true }; };
+ template <> struct StructExtends<DeviceGroupRenderPassBeginInfo, RenderPassBeginInfo>{ enum { value = true }; };
+ template <> struct StructExtends<DeviceGroupSubmitInfo, SubmitInfo>{ enum { value = true }; };
+ template <> struct StructExtends<DeviceGroupSwapchainCreateInfoKHR, SwapchainCreateInfoKHR>{ enum { value = true }; };
+ template <> struct StructExtends<DeviceMemoryOverallocationCreateInfoAMD, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<DevicePrivateDataCreateInfoEXT, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<DeviceQueueGlobalPriorityCreateInfoEXT, DeviceQueueCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<DisplayNativeHdrSurfaceCapabilitiesAMD, SurfaceCapabilities2KHR>{ enum { value = true }; };
+ template <> struct StructExtends<DisplayPresentInfoKHR, PresentInfoKHR>{ enum { value = true }; };
+ template <> struct StructExtends<DrmFormatModifierPropertiesListEXT, FormatProperties2>{ enum { value = true }; };
+ template <> struct StructExtends<ExportFenceCreateInfo, FenceCreateInfo>{ enum { value = true }; };
#ifdef VK_USE_PLATFORM_WIN32_KHR
- template <> struct isStructureChainValid<FenceCreateInfo, ExportFenceWin32HandleInfoKHR>{ enum { value = true }; };
+ template <> struct StructExtends<ExportFenceWin32HandleInfoKHR, FenceCreateInfo>{ enum { value = true }; };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- template <> struct isStructureChainValid<MemoryAllocateInfo, ExportMemoryAllocateInfo>{ enum { value = true }; };
- template <> struct isStructureChainValid<MemoryAllocateInfo, ExportMemoryAllocateInfoNV>{ enum { value = true }; };
+ template <> struct StructExtends<ExportMemoryAllocateInfo, MemoryAllocateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<ExportMemoryAllocateInfoNV, MemoryAllocateInfo>{ enum { value = true }; };
#ifdef VK_USE_PLATFORM_WIN32_KHR
- template <> struct isStructureChainValid<MemoryAllocateInfo, ExportMemoryWin32HandleInfoKHR>{ enum { value = true }; };
+ template <> struct StructExtends<ExportMemoryWin32HandleInfoKHR, MemoryAllocateInfo>{ enum { value = true }; };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
- template <> struct isStructureChainValid<MemoryAllocateInfo, ExportMemoryWin32HandleInfoNV>{ enum { value = true }; };
+ template <> struct StructExtends<ExportMemoryWin32HandleInfoNV, MemoryAllocateInfo>{ enum { value = true }; };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- template <> struct isStructureChainValid<SemaphoreCreateInfo, ExportSemaphoreCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<ExportSemaphoreCreateInfo, SemaphoreCreateInfo>{ enum { value = true }; };
#ifdef VK_USE_PLATFORM_WIN32_KHR
- template <> struct isStructureChainValid<SemaphoreCreateInfo, ExportSemaphoreWin32HandleInfoKHR>{ enum { value = true }; };
+ template <> struct StructExtends<ExportSemaphoreWin32HandleInfoKHR, SemaphoreCreateInfo>{ enum { value = true }; };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#ifdef VK_USE_PLATFORM_ANDROID_KHR
- template <> struct isStructureChainValid<ImageCreateInfo, ExternalFormatANDROID>{ enum { value = true }; };
- template <> struct isStructureChainValid<SamplerYcbcrConversionCreateInfo, ExternalFormatANDROID>{ enum { value = true }; };
+ template <> struct StructExtends<ExternalFormatANDROID, ImageCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<ExternalFormatANDROID, SamplerYcbcrConversionCreateInfo>{ enum { value = true }; };
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
- template <> struct isStructureChainValid<ImageFormatProperties2, ExternalImageFormatProperties>{ enum { value = true }; };
- template <> struct isStructureChainValid<BufferCreateInfo, ExternalMemoryBufferCreateInfo>{ enum { value = true }; };
- template <> struct isStructureChainValid<ImageCreateInfo, ExternalMemoryImageCreateInfo>{ enum { value = true }; };
- template <> struct isStructureChainValid<ImageCreateInfo, ExternalMemoryImageCreateInfoNV>{ enum { value = true }; };
- template <> struct isStructureChainValid<ImageFormatProperties2, FilterCubicImageViewImageFormatPropertiesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<FramebufferCreateInfo, FramebufferAttachmentsCreateInfo>{ enum { value = true }; };
- template <> struct isStructureChainValid<ImageCreateInfo, ImageDrmFormatModifierExplicitCreateInfoEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<ImageCreateInfo, ImageDrmFormatModifierListCreateInfoEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<ImageCreateInfo, ImageFormatListCreateInfo>{ enum { value = true }; };
- template <> struct isStructureChainValid<SwapchainCreateInfoKHR, ImageFormatListCreateInfo>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceImageFormatInfo2, ImageFormatListCreateInfo>{ enum { value = true }; };
- template <> struct isStructureChainValid<ImageMemoryRequirementsInfo2, ImagePlaneMemoryRequirementsInfo>{ enum { value = true }; };
- template <> struct isStructureChainValid<ImageCreateInfo, ImageStencilUsageCreateInfo>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceImageFormatInfo2, ImageStencilUsageCreateInfo>{ enum { value = true }; };
- template <> struct isStructureChainValid<ImageCreateInfo, ImageSwapchainCreateInfoKHR>{ enum { value = true }; };
- template <> struct isStructureChainValid<ImageViewCreateInfo, ImageViewASTCDecodeModeEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<ImageViewCreateInfo, ImageViewUsageCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<ExternalImageFormatProperties, ImageFormatProperties2>{ enum { value = true }; };
+ template <> struct StructExtends<ExternalMemoryBufferCreateInfo, BufferCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<ExternalMemoryImageCreateInfo, ImageCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<ExternalMemoryImageCreateInfoNV, ImageCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<FilterCubicImageViewImageFormatPropertiesEXT, ImageFormatProperties2>{ enum { value = true }; };
+ template <> struct StructExtends<FramebufferAttachmentsCreateInfo, FramebufferCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<GraphicsPipelineShaderGroupsCreateInfoNV, GraphicsPipelineCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<ImageDrmFormatModifierExplicitCreateInfoEXT, ImageCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<ImageDrmFormatModifierListCreateInfoEXT, ImageCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<ImageFormatListCreateInfo, ImageCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<ImageFormatListCreateInfo, SwapchainCreateInfoKHR>{ enum { value = true }; };
+ template <> struct StructExtends<ImageFormatListCreateInfo, PhysicalDeviceImageFormatInfo2>{ enum { value = true }; };
+ template <> struct StructExtends<ImagePlaneMemoryRequirementsInfo, ImageMemoryRequirementsInfo2>{ enum { value = true }; };
+ template <> struct StructExtends<ImageStencilUsageCreateInfo, ImageCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<ImageStencilUsageCreateInfo, PhysicalDeviceImageFormatInfo2>{ enum { value = true }; };
+ template <> struct StructExtends<ImageSwapchainCreateInfoKHR, ImageCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<ImageViewASTCDecodeModeEXT, ImageViewCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<ImageViewUsageCreateInfo, ImageViewCreateInfo>{ enum { value = true }; };
#ifdef VK_USE_PLATFORM_ANDROID_KHR
- template <> struct isStructureChainValid<MemoryAllocateInfo, ImportAndroidHardwareBufferInfoANDROID>{ enum { value = true }; };
+ template <> struct StructExtends<ImportAndroidHardwareBufferInfoANDROID, MemoryAllocateInfo>{ enum { value = true }; };
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
- template <> struct isStructureChainValid<MemoryAllocateInfo, ImportMemoryFdInfoKHR>{ enum { value = true }; };
- template <> struct isStructureChainValid<MemoryAllocateInfo, ImportMemoryHostPointerInfoEXT>{ enum { value = true }; };
+ template <> struct StructExtends<ImportMemoryFdInfoKHR, MemoryAllocateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<ImportMemoryHostPointerInfoEXT, MemoryAllocateInfo>{ enum { value = true }; };
#ifdef VK_USE_PLATFORM_WIN32_KHR
- template <> struct isStructureChainValid<MemoryAllocateInfo, ImportMemoryWin32HandleInfoKHR>{ enum { value = true }; };
+ template <> struct StructExtends<ImportMemoryWin32HandleInfoKHR, MemoryAllocateInfo>{ enum { value = true }; };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
- template <> struct isStructureChainValid<MemoryAllocateInfo, ImportMemoryWin32HandleInfoNV>{ enum { value = true }; };
+ template <> struct StructExtends<ImportMemoryWin32HandleInfoNV, MemoryAllocateInfo>{ enum { value = true }; };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- template <> struct isStructureChainValid<MemoryAllocateInfo, MemoryAllocateFlagsInfo>{ enum { value = true }; };
- template <> struct isStructureChainValid<MemoryAllocateInfo, MemoryDedicatedAllocateInfo>{ enum { value = true }; };
- template <> struct isStructureChainValid<MemoryRequirements2, MemoryDedicatedRequirements>{ enum { value = true }; };
- template <> struct isStructureChainValid<MemoryAllocateInfo, MemoryOpaqueCaptureAddressAllocateInfo>{ enum { value = true }; };
- template <> struct isStructureChainValid<MemoryAllocateInfo, MemoryPriorityAllocateInfoEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<SubmitInfo, PerformanceQuerySubmitInfoKHR>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDevice16BitStorageFeatures>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDevice16BitStorageFeatures>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDevice8BitStorageFeatures>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDevice8BitStorageFeatures>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceASTCDecodeFeaturesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceASTCDecodeFeaturesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceBlendOperationAdvancedFeaturesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceBlendOperationAdvancedFeaturesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceBlendOperationAdvancedPropertiesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceBufferDeviceAddressFeatures>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceBufferDeviceAddressFeatures>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceBufferDeviceAddressFeaturesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceBufferDeviceAddressFeaturesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceCoherentMemoryFeaturesAMD>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceCoherentMemoryFeaturesAMD>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceComputeShaderDerivativesFeaturesNV>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceComputeShaderDerivativesFeaturesNV>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceConditionalRenderingFeaturesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceConditionalRenderingFeaturesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceConservativeRasterizationPropertiesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceCooperativeMatrixFeaturesNV>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceCooperativeMatrixFeaturesNV>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceCooperativeMatrixPropertiesNV>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceCornerSampledImageFeaturesNV>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceCornerSampledImageFeaturesNV>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceCoverageReductionModeFeaturesNV>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceCoverageReductionModeFeaturesNV>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceDepthClipEnableFeaturesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceDepthClipEnableFeaturesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceDepthStencilResolveProperties>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceDescriptorIndexingFeatures>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceDescriptorIndexingFeatures>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceDescriptorIndexingProperties>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceDiscardRectanglePropertiesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceDriverProperties>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceExclusiveScissorFeaturesNV>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceExclusiveScissorFeaturesNV>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceImageFormatInfo2, PhysicalDeviceExternalImageFormatInfo>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceExternalMemoryHostPropertiesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceFeatures2>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceFloatControlsProperties>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceFragmentDensityMapFeaturesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceFragmentDensityMapFeaturesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceFragmentDensityMapPropertiesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceFragmentShaderBarycentricFeaturesNV>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceFragmentShaderBarycentricFeaturesNV>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceFragmentShaderInterlockFeaturesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceFragmentShaderInterlockFeaturesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceHostQueryResetFeatures>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceHostQueryResetFeatures>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceIDProperties>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceImageFormatInfo2, PhysicalDeviceImageDrmFormatModifierInfoEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceImageFormatInfo2, PhysicalDeviceImageViewImageFormatInfoEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceImagelessFramebufferFeatures>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceImagelessFramebufferFeatures>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceIndexTypeUint8FeaturesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceIndexTypeUint8FeaturesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceInlineUniformBlockFeaturesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceInlineUniformBlockFeaturesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceInlineUniformBlockPropertiesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceLineRasterizationFeaturesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceLineRasterizationFeaturesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceLineRasterizationPropertiesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceMaintenance3Properties>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceMemoryProperties2, PhysicalDeviceMemoryBudgetPropertiesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceMemoryPriorityFeaturesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceMemoryPriorityFeaturesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceMeshShaderFeaturesNV>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceMeshShaderFeaturesNV>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceMeshShaderPropertiesNV>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceMultiviewFeatures>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceMultiviewFeatures>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceMultiviewProperties>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDevicePCIBusInfoPropertiesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDevicePerformanceQueryFeaturesKHR>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDevicePerformanceQueryFeaturesKHR>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDevicePerformanceQueryPropertiesKHR>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDevicePipelineExecutablePropertiesFeaturesKHR>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDevicePipelineExecutablePropertiesFeaturesKHR>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDevicePointClippingProperties>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceProtectedMemoryFeatures>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceProtectedMemoryFeatures>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceProtectedMemoryProperties>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDevicePushDescriptorPropertiesKHR>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceRayTracingPropertiesNV>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceRepresentativeFragmentTestFeaturesNV>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceRepresentativeFragmentTestFeaturesNV>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceSampleLocationsPropertiesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceSamplerFilterMinmaxProperties>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceSamplerYcbcrConversionFeatures>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceSamplerYcbcrConversionFeatures>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceScalarBlockLayoutFeatures>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceScalarBlockLayoutFeatures>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceSeparateDepthStencilLayoutsFeatures>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceSeparateDepthStencilLayoutsFeatures>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceShaderAtomicInt64Features>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceShaderAtomicInt64Features>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceShaderClockFeaturesKHR>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceShaderClockFeaturesKHR>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceShaderCoreProperties2AMD>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceShaderCorePropertiesAMD>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceShaderDrawParametersFeatures>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceShaderDrawParametersFeatures>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceShaderFloat16Int8Features>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceShaderFloat16Int8Features>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceShaderImageFootprintFeaturesNV>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceShaderImageFootprintFeaturesNV>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceShaderSMBuiltinsFeaturesNV>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceShaderSMBuiltinsFeaturesNV>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceShaderSMBuiltinsPropertiesNV>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceShaderSubgroupExtendedTypesFeatures>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceShaderSubgroupExtendedTypesFeatures>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceShadingRateImageFeaturesNV>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceShadingRateImageFeaturesNV>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceShadingRateImagePropertiesNV>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceSubgroupProperties>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceSubgroupSizeControlFeaturesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceSubgroupSizeControlFeaturesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceSubgroupSizeControlPropertiesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceTexelBufferAlignmentFeaturesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceTexelBufferAlignmentFeaturesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceTexelBufferAlignmentPropertiesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceTimelineSemaphoreFeatures>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceTimelineSemaphoreFeatures>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceTimelineSemaphoreProperties>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceTransformFeedbackFeaturesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceTransformFeedbackFeaturesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceTransformFeedbackPropertiesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceUniformBufferStandardLayoutFeatures>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceUniformBufferStandardLayoutFeatures>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceVariablePointersFeatures>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceVariablePointersFeatures>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceVertexAttributeDivisorFeaturesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceVertexAttributeDivisorFeaturesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceVertexAttributeDivisorPropertiesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceVulkan11Features>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceVulkan11Features>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceVulkan11Properties>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceVulkan12Features>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceVulkan12Features>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceVulkan12Properties>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceVulkanMemoryModelFeatures>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceVulkanMemoryModelFeatures>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceYcbcrImageArraysFeaturesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceYcbcrImageArraysFeaturesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<PipelineColorBlendStateCreateInfo, PipelineColorBlendAdvancedStateCreateInfoEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<GraphicsPipelineCreateInfo, PipelineCompilerControlCreateInfoAMD>{ enum { value = true }; };
- template <> struct isStructureChainValid<ComputePipelineCreateInfo, PipelineCompilerControlCreateInfoAMD>{ enum { value = true }; };
- template <> struct isStructureChainValid<PipelineMultisampleStateCreateInfo, PipelineCoverageModulationStateCreateInfoNV>{ enum { value = true }; };
- template <> struct isStructureChainValid<PipelineMultisampleStateCreateInfo, PipelineCoverageReductionStateCreateInfoNV>{ enum { value = true }; };
- template <> struct isStructureChainValid<PipelineMultisampleStateCreateInfo, PipelineCoverageToColorStateCreateInfoNV>{ enum { value = true }; };
- template <> struct isStructureChainValid<GraphicsPipelineCreateInfo, PipelineCreationFeedbackCreateInfoEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<ComputePipelineCreateInfo, PipelineCreationFeedbackCreateInfoEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<RayTracingPipelineCreateInfoNV, PipelineCreationFeedbackCreateInfoEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<GraphicsPipelineCreateInfo, PipelineDiscardRectangleStateCreateInfoEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<PipelineRasterizationStateCreateInfo, PipelineRasterizationConservativeStateCreateInfoEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<PipelineRasterizationStateCreateInfo, PipelineRasterizationDepthClipStateCreateInfoEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<PipelineRasterizationStateCreateInfo, PipelineRasterizationLineStateCreateInfoEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<PipelineRasterizationStateCreateInfo, PipelineRasterizationStateRasterizationOrderAMD>{ enum { value = true }; };
- template <> struct isStructureChainValid<PipelineRasterizationStateCreateInfo, PipelineRasterizationStateStreamCreateInfoEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<GraphicsPipelineCreateInfo, PipelineRepresentativeFragmentTestStateCreateInfoNV>{ enum { value = true }; };
- template <> struct isStructureChainValid<PipelineMultisampleStateCreateInfo, PipelineSampleLocationsStateCreateInfoEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<PipelineShaderStageCreateInfo, PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<PipelineTessellationStateCreateInfo, PipelineTessellationDomainOriginStateCreateInfo>{ enum { value = true }; };
- template <> struct isStructureChainValid<PipelineVertexInputStateCreateInfo, PipelineVertexInputDivisorStateCreateInfoEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<PipelineViewportStateCreateInfo, PipelineViewportCoarseSampleOrderStateCreateInfoNV>{ enum { value = true }; };
- template <> struct isStructureChainValid<PipelineViewportStateCreateInfo, PipelineViewportExclusiveScissorStateCreateInfoNV>{ enum { value = true }; };
- template <> struct isStructureChainValid<PipelineViewportStateCreateInfo, PipelineViewportShadingRateImageStateCreateInfoNV>{ enum { value = true }; };
- template <> struct isStructureChainValid<PipelineViewportStateCreateInfo, PipelineViewportSwizzleStateCreateInfoNV>{ enum { value = true }; };
- template <> struct isStructureChainValid<PipelineViewportStateCreateInfo, PipelineViewportWScalingStateCreateInfoNV>{ enum { value = true }; };
+ template <> struct StructExtends<MemoryAllocateFlagsInfo, MemoryAllocateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<MemoryDedicatedAllocateInfo, MemoryAllocateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<MemoryDedicatedRequirements, MemoryRequirements2>{ enum { value = true }; };
+ template <> struct StructExtends<MemoryOpaqueCaptureAddressAllocateInfo, MemoryAllocateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<MemoryPriorityAllocateInfoEXT, MemoryAllocateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PerformanceQuerySubmitInfoKHR, SubmitInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDevice16BitStorageFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDevice16BitStorageFeatures, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDevice4444FormatsFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDevice4444FormatsFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDevice8BitStorageFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDevice8BitStorageFeatures, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceASTCDecodeFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceASTCDecodeFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceBlendOperationAdvancedFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceBlendOperationAdvancedFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceBlendOperationAdvancedPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceBufferDeviceAddressFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceBufferDeviceAddressFeatures, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceBufferDeviceAddressFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceBufferDeviceAddressFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceCoherentMemoryFeaturesAMD, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceCoherentMemoryFeaturesAMD, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceComputeShaderDerivativesFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceComputeShaderDerivativesFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceConditionalRenderingFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceConditionalRenderingFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceConservativeRasterizationPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceCooperativeMatrixFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceCooperativeMatrixFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceCooperativeMatrixPropertiesNV, PhysicalDeviceProperties2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceCornerSampledImageFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceCornerSampledImageFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceCoverageReductionModeFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceCoverageReductionModeFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceCustomBorderColorFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceCustomBorderColorFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceCustomBorderColorPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceDepthClipEnableFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceDepthClipEnableFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceDepthStencilResolveProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceDescriptorIndexingFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceDescriptorIndexingFeatures, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceDescriptorIndexingProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceDeviceGeneratedCommandsFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceDeviceGeneratedCommandsFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceDeviceGeneratedCommandsPropertiesNV, PhysicalDeviceProperties2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceDiagnosticsConfigFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceDiagnosticsConfigFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceDiscardRectanglePropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceDriverProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceExclusiveScissorFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceExclusiveScissorFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceExtendedDynamicStateFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceExtendedDynamicStateFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceExternalImageFormatInfo, PhysicalDeviceImageFormatInfo2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceExternalMemoryHostPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceFeatures2, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceFloatControlsProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceFragmentDensityMap2FeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceFragmentDensityMap2FeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceFragmentDensityMap2PropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceFragmentDensityMapFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceFragmentDensityMapFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceFragmentDensityMapPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceFragmentShaderBarycentricFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceFragmentShaderBarycentricFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceFragmentShaderInterlockFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceFragmentShaderInterlockFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceHostQueryResetFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceHostQueryResetFeatures, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceIDProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceImageDrmFormatModifierInfoEXT, PhysicalDeviceImageFormatInfo2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceImageRobustnessFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceImageRobustnessFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceImageViewImageFormatInfoEXT, PhysicalDeviceImageFormatInfo2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceImagelessFramebufferFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceImagelessFramebufferFeatures, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceIndexTypeUint8FeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceIndexTypeUint8FeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceInlineUniformBlockFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceInlineUniformBlockFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceInlineUniformBlockPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceLineRasterizationFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceLineRasterizationFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceLineRasterizationPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceMaintenance3Properties, PhysicalDeviceProperties2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceMemoryBudgetPropertiesEXT, PhysicalDeviceMemoryProperties2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceMemoryPriorityFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceMemoryPriorityFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceMeshShaderFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceMeshShaderFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceMeshShaderPropertiesNV, PhysicalDeviceProperties2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceMultiviewFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceMultiviewFeatures, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX, PhysicalDeviceProperties2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceMultiviewProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDevicePCIBusInfoPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDevicePerformanceQueryFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDevicePerformanceQueryFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDevicePerformanceQueryPropertiesKHR, PhysicalDeviceProperties2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDevicePipelineCreationCacheControlFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDevicePipelineCreationCacheControlFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDevicePipelineExecutablePropertiesFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDevicePipelineExecutablePropertiesFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDevicePointClippingProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template <> struct StructExtends<PhysicalDevicePortabilitySubsetFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDevicePortabilitySubsetFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template <> struct StructExtends<PhysicalDevicePortabilitySubsetPropertiesKHR, PhysicalDeviceProperties2>{ enum { value = true }; };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ template <> struct StructExtends<PhysicalDevicePrivateDataFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDevicePrivateDataFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceProtectedMemoryFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceProtectedMemoryFeatures, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceProtectedMemoryProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDevicePushDescriptorPropertiesKHR, PhysicalDeviceProperties2>{ enum { value = true }; };
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template <> struct StructExtends<PhysicalDeviceRayTracingFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceRayTracingFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template <> struct StructExtends<PhysicalDeviceRayTracingPropertiesKHR, PhysicalDeviceProperties2>{ enum { value = true }; };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ template <> struct StructExtends<PhysicalDeviceRayTracingPropertiesNV, PhysicalDeviceProperties2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceRepresentativeFragmentTestFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceRepresentativeFragmentTestFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceRobustness2FeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceRobustness2FeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceRobustness2PropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceSampleLocationsPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceSamplerFilterMinmaxProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceSamplerYcbcrConversionFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceSamplerYcbcrConversionFeatures, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceScalarBlockLayoutFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceScalarBlockLayoutFeatures, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceSeparateDepthStencilLayoutsFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceSeparateDepthStencilLayoutsFeatures, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceShaderAtomicFloatFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceShaderAtomicFloatFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceShaderAtomicInt64Features, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceShaderAtomicInt64Features, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceShaderClockFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceShaderClockFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceShaderCoreProperties2AMD, PhysicalDeviceProperties2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceShaderCorePropertiesAMD, PhysicalDeviceProperties2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceShaderDrawParametersFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceShaderDrawParametersFeatures, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceShaderFloat16Int8Features, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceShaderFloat16Int8Features, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceShaderImageFootprintFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceShaderImageFootprintFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceShaderSMBuiltinsFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceShaderSMBuiltinsFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceShaderSMBuiltinsPropertiesNV, PhysicalDeviceProperties2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceShaderSubgroupExtendedTypesFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceShaderSubgroupExtendedTypesFeatures, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceShadingRateImageFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceShadingRateImageFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceShadingRateImagePropertiesNV, PhysicalDeviceProperties2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceSubgroupProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceSubgroupSizeControlFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceSubgroupSizeControlFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceSubgroupSizeControlPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceTexelBufferAlignmentFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceTexelBufferAlignmentFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceTexelBufferAlignmentPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceTimelineSemaphoreFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceTimelineSemaphoreFeatures, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceTimelineSemaphoreProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceTransformFeedbackFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceTransformFeedbackFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceTransformFeedbackPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceUniformBufferStandardLayoutFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceUniformBufferStandardLayoutFeatures, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceVariablePointersFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceVariablePointersFeatures, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceVertexAttributeDivisorFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceVertexAttributeDivisorFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceVertexAttributeDivisorPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceVulkan11Features, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceVulkan11Features, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceVulkan11Properties, PhysicalDeviceProperties2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceVulkan12Features, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceVulkan12Features, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceVulkan12Properties, PhysicalDeviceProperties2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceVulkanMemoryModelFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceVulkanMemoryModelFeatures, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceYcbcrImageArraysFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceYcbcrImageArraysFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PipelineColorBlendAdvancedStateCreateInfoEXT, PipelineColorBlendStateCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PipelineCompilerControlCreateInfoAMD, GraphicsPipelineCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PipelineCompilerControlCreateInfoAMD, ComputePipelineCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PipelineCoverageModulationStateCreateInfoNV, PipelineMultisampleStateCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PipelineCoverageReductionStateCreateInfoNV, PipelineMultisampleStateCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PipelineCoverageToColorStateCreateInfoNV, PipelineMultisampleStateCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PipelineCreationFeedbackCreateInfoEXT, GraphicsPipelineCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PipelineCreationFeedbackCreateInfoEXT, ComputePipelineCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PipelineCreationFeedbackCreateInfoEXT, RayTracingPipelineCreateInfoNV>{ enum { value = true }; };
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template <> struct StructExtends<PipelineCreationFeedbackCreateInfoEXT, RayTracingPipelineCreateInfoKHR>{ enum { value = true }; };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ template <> struct StructExtends<PipelineDiscardRectangleStateCreateInfoEXT, GraphicsPipelineCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PipelineRasterizationConservativeStateCreateInfoEXT, PipelineRasterizationStateCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PipelineRasterizationDepthClipStateCreateInfoEXT, PipelineRasterizationStateCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PipelineRasterizationLineStateCreateInfoEXT, PipelineRasterizationStateCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PipelineRasterizationStateRasterizationOrderAMD, PipelineRasterizationStateCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PipelineRasterizationStateStreamCreateInfoEXT, PipelineRasterizationStateCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PipelineRepresentativeFragmentTestStateCreateInfoNV, GraphicsPipelineCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PipelineSampleLocationsStateCreateInfoEXT, PipelineMultisampleStateCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT, PipelineShaderStageCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PipelineTessellationDomainOriginStateCreateInfo, PipelineTessellationStateCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PipelineVertexInputDivisorStateCreateInfoEXT, PipelineVertexInputStateCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PipelineViewportCoarseSampleOrderStateCreateInfoNV, PipelineViewportStateCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PipelineViewportExclusiveScissorStateCreateInfoNV, PipelineViewportStateCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PipelineViewportShadingRateImageStateCreateInfoNV, PipelineViewportStateCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PipelineViewportSwizzleStateCreateInfoNV, PipelineViewportStateCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PipelineViewportWScalingStateCreateInfoNV, PipelineViewportStateCreateInfo>{ enum { value = true }; };
#ifdef VK_USE_PLATFORM_GGP
- template <> struct isStructureChainValid<PresentInfoKHR, PresentFrameTokenGGP>{ enum { value = true }; };
+ template <> struct StructExtends<PresentFrameTokenGGP, PresentInfoKHR>{ enum { value = true }; };
#endif /*VK_USE_PLATFORM_GGP*/
- template <> struct isStructureChainValid<PresentInfoKHR, PresentRegionsKHR>{ enum { value = true }; };
- template <> struct isStructureChainValid<PresentInfoKHR, PresentTimesInfoGOOGLE>{ enum { value = true }; };
- template <> struct isStructureChainValid<SubmitInfo, ProtectedSubmitInfo>{ enum { value = true }; };
- template <> struct isStructureChainValid<QueryPoolCreateInfo, QueryPoolPerformanceCreateInfoKHR>{ enum { value = true }; };
- template <> struct isStructureChainValid<QueueFamilyProperties2, QueueFamilyCheckpointPropertiesNV>{ enum { value = true }; };
- template <> struct isStructureChainValid<RenderPassBeginInfo, RenderPassAttachmentBeginInfo>{ enum { value = true }; };
- template <> struct isStructureChainValid<RenderPassCreateInfo, RenderPassFragmentDensityMapCreateInfoEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<RenderPassCreateInfo2, RenderPassFragmentDensityMapCreateInfoEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<RenderPassCreateInfo, RenderPassInputAttachmentAspectCreateInfo>{ enum { value = true }; };
- template <> struct isStructureChainValid<RenderPassCreateInfo, RenderPassMultiviewCreateInfo>{ enum { value = true }; };
- template <> struct isStructureChainValid<RenderPassBeginInfo, RenderPassSampleLocationsBeginInfoEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<ImageMemoryBarrier, SampleLocationsInfoEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<SamplerCreateInfo, SamplerReductionModeCreateInfo>{ enum { value = true }; };
- template <> struct isStructureChainValid<ImageFormatProperties2, SamplerYcbcrConversionImageFormatProperties>{ enum { value = true }; };
- template <> struct isStructureChainValid<SamplerCreateInfo, SamplerYcbcrConversionInfo>{ enum { value = true }; };
- template <> struct isStructureChainValid<ImageViewCreateInfo, SamplerYcbcrConversionInfo>{ enum { value = true }; };
- template <> struct isStructureChainValid<SemaphoreCreateInfo, SemaphoreTypeCreateInfo>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceExternalSemaphoreInfo, SemaphoreTypeCreateInfo>{ enum { value = true }; };
- template <> struct isStructureChainValid<ShaderModuleCreateInfo, ShaderModuleValidationCacheCreateInfoEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<SurfaceCapabilities2KHR, SharedPresentSurfaceCapabilitiesKHR>{ enum { value = true }; };
- template <> struct isStructureChainValid<SubpassDescription2, SubpassDescriptionDepthStencilResolve>{ enum { value = true }; };
+ template <> struct StructExtends<PresentRegionsKHR, PresentInfoKHR>{ enum { value = true }; };
+ template <> struct StructExtends<PresentTimesInfoGOOGLE, PresentInfoKHR>{ enum { value = true }; };
+ template <> struct StructExtends<ProtectedSubmitInfo, SubmitInfo>{ enum { value = true }; };
+ template <> struct StructExtends<QueryPoolPerformanceCreateInfoKHR, QueryPoolCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<QueryPoolPerformanceQueryCreateInfoINTEL, QueryPoolCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<QueueFamilyCheckpointPropertiesNV, QueueFamilyProperties2>{ enum { value = true }; };
+ template <> struct StructExtends<RenderPassAttachmentBeginInfo, RenderPassBeginInfo>{ enum { value = true }; };
+ template <> struct StructExtends<RenderPassFragmentDensityMapCreateInfoEXT, RenderPassCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<RenderPassFragmentDensityMapCreateInfoEXT, RenderPassCreateInfo2>{ enum { value = true }; };
+ template <> struct StructExtends<RenderPassInputAttachmentAspectCreateInfo, RenderPassCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<RenderPassMultiviewCreateInfo, RenderPassCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<RenderPassSampleLocationsBeginInfoEXT, RenderPassBeginInfo>{ enum { value = true }; };
+ template <> struct StructExtends<RenderPassTransformBeginInfoQCOM, RenderPassBeginInfo>{ enum { value = true }; };
+ template <> struct StructExtends<SampleLocationsInfoEXT, ImageMemoryBarrier>{ enum { value = true }; };
+ template <> struct StructExtends<SamplerCustomBorderColorCreateInfoEXT, SamplerCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<SamplerReductionModeCreateInfo, SamplerCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<SamplerYcbcrConversionImageFormatProperties, ImageFormatProperties2>{ enum { value = true }; };
+ template <> struct StructExtends<SamplerYcbcrConversionInfo, SamplerCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<SamplerYcbcrConversionInfo, ImageViewCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<SemaphoreTypeCreateInfo, SemaphoreCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<SemaphoreTypeCreateInfo, PhysicalDeviceExternalSemaphoreInfo>{ enum { value = true }; };
+ template <> struct StructExtends<ShaderModuleValidationCacheCreateInfoEXT, ShaderModuleCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<SharedPresentSurfaceCapabilitiesKHR, SurfaceCapabilities2KHR>{ enum { value = true }; };
+ template <> struct StructExtends<SubpassDescriptionDepthStencilResolve, SubpassDescription2>{ enum { value = true }; };
#ifdef VK_USE_PLATFORM_WIN32_KHR
- template <> struct isStructureChainValid<SurfaceCapabilities2KHR, SurfaceCapabilitiesFullScreenExclusiveEXT>{ enum { value = true }; };
+ template <> struct StructExtends<SurfaceCapabilitiesFullScreenExclusiveEXT, SurfaceCapabilities2KHR>{ enum { value = true }; };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
- template <> struct isStructureChainValid<PhysicalDeviceSurfaceInfo2KHR, SurfaceFullScreenExclusiveInfoEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<SwapchainCreateInfoKHR, SurfaceFullScreenExclusiveInfoEXT>{ enum { value = true }; };
+ template <> struct StructExtends<SurfaceFullScreenExclusiveInfoEXT, PhysicalDeviceSurfaceInfo2KHR>{ enum { value = true }; };
+ template <> struct StructExtends<SurfaceFullScreenExclusiveInfoEXT, SwapchainCreateInfoKHR>{ enum { value = true }; };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
- template <> struct isStructureChainValid<PhysicalDeviceSurfaceInfo2KHR, SurfaceFullScreenExclusiveWin32InfoEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<SwapchainCreateInfoKHR, SurfaceFullScreenExclusiveWin32InfoEXT>{ enum { value = true }; };
+ template <> struct StructExtends<SurfaceFullScreenExclusiveWin32InfoEXT, PhysicalDeviceSurfaceInfo2KHR>{ enum { value = true }; };
+ template <> struct StructExtends<SurfaceFullScreenExclusiveWin32InfoEXT, SwapchainCreateInfoKHR>{ enum { value = true }; };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- template <> struct isStructureChainValid<SurfaceCapabilities2KHR, SurfaceProtectedCapabilitiesKHR>{ enum { value = true }; };
- template <> struct isStructureChainValid<SwapchainCreateInfoKHR, SwapchainCounterCreateInfoEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<SwapchainCreateInfoKHR, SwapchainDisplayNativeHdrCreateInfoAMD>{ enum { value = true }; };
- template <> struct isStructureChainValid<ImageFormatProperties2, TextureLODGatherFormatPropertiesAMD>{ enum { value = true }; };
- template <> struct isStructureChainValid<SubmitInfo, TimelineSemaphoreSubmitInfo>{ enum { value = true }; };
- template <> struct isStructureChainValid<BindSparseInfo, TimelineSemaphoreSubmitInfo>{ enum { value = true }; };
- template <> struct isStructureChainValid<InstanceCreateInfo, ValidationFeaturesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<InstanceCreateInfo, ValidationFlagsEXT>{ enum { value = true }; };
+ template <> struct StructExtends<SurfaceProtectedCapabilitiesKHR, SurfaceCapabilities2KHR>{ enum { value = true }; };
+ template <> struct StructExtends<SwapchainCounterCreateInfoEXT, SwapchainCreateInfoKHR>{ enum { value = true }; };
+ template <> struct StructExtends<SwapchainDisplayNativeHdrCreateInfoAMD, SwapchainCreateInfoKHR>{ enum { value = true }; };
+ template <> struct StructExtends<TextureLODGatherFormatPropertiesAMD, ImageFormatProperties2>{ enum { value = true }; };
+ template <> struct StructExtends<TimelineSemaphoreSubmitInfo, SubmitInfo>{ enum { value = true }; };
+ template <> struct StructExtends<TimelineSemaphoreSubmitInfo, BindSparseInfo>{ enum { value = true }; };
+ template <> struct StructExtends<ValidationFeaturesEXT, InstanceCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<ValidationFlagsEXT, InstanceCreateInfo>{ enum { value = true }; };
#ifdef VK_USE_PLATFORM_WIN32_KHR
- template <> struct isStructureChainValid<SubmitInfo, Win32KeyedMutexAcquireReleaseInfoKHR>{ enum { value = true }; };
+ template <> struct StructExtends<Win32KeyedMutexAcquireReleaseInfoKHR, SubmitInfo>{ enum { value = true }; };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
- template <> struct isStructureChainValid<SubmitInfo, Win32KeyedMutexAcquireReleaseInfoNV>{ enum { value = true }; };
+ template <> struct StructExtends<Win32KeyedMutexAcquireReleaseInfoNV, SubmitInfo>{ enum { value = true }; };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- template <> struct isStructureChainValid<WriteDescriptorSet, WriteDescriptorSetAccelerationStructureNV>{ enum { value = true }; };
- template <> struct isStructureChainValid<WriteDescriptorSet, WriteDescriptorSetInlineUniformBlockEXT>{ enum { value = true }; };
+ template <> struct StructExtends<WriteDescriptorSetAccelerationStructureKHR, WriteDescriptorSet>{ enum { value = true }; };
+ template <> struct StructExtends<WriteDescriptorSetInlineUniformBlockEXT, WriteDescriptorSet>{ enum { value = true }; };
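The specializations above replace the old isStructureChainValid<Parent, Child> traits with StructExtends<Extension, Extended> (note the reversed template parameter order); vk::StructureChain consults them to reject pNext chains the spec does not allow. A minimal usage sketch, not part of the patch, assuming a valid vk::PhysicalDevice named physicalDevice and the default dispatcher:

    // Compiles only because StructExtends<PhysicalDeviceVulkan12Features, PhysicalDeviceFeatures2>
    // is specialized to true above; chaining an unrelated struct would be a compile-time error.
    vk::StructureChain<vk::PhysicalDeviceFeatures2, vk::PhysicalDeviceVulkan12Features> chain =
        physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2, vk::PhysicalDeviceVulkan12Features>();
    vk::PhysicalDeviceVulkan12Features const & vk12 = chain.get<vk::PhysicalDeviceVulkan12Features>();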
#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
class DynamicLoader
{
public:
-#ifdef VULKAN_HPP_NO_EXCEPTIONS
- DynamicLoader() VULKAN_HPP_NOEXCEPT : m_success( false )
-#else
- DynamicLoader() : m_success( false )
-#endif
+# ifdef VULKAN_HPP_NO_EXCEPTIONS
+ DynamicLoader( std::string const & vulkanLibraryName = {} ) VULKAN_HPP_NOEXCEPT : m_success( false )
+# else
+ DynamicLoader( std::string const & vulkanLibraryName = {} ) : m_success( false )
+# endif
{
-#if defined(__linux__)
- m_library = dlopen( "libvulkan.so", RTLD_NOW | RTLD_LOCAL );
-#elif defined(__APPLE__)
- m_library = dlopen( "libvulkan.dylib", RTLD_NOW | RTLD_LOCAL );
-#elif defined(_WIN32)
- m_library = LoadLibrary( TEXT( "vulkan-1.dll" ) );
-#else
- assert( false && "unsupported platform" );
-#endif
+ if ( !vulkanLibraryName.empty() )
+ {
+# if defined( __linux__ ) || defined( __APPLE__ )
+ m_library = dlopen( vulkanLibraryName.c_str(), RTLD_NOW | RTLD_LOCAL );
+# elif defined( _WIN32 )
+ m_library = ::LoadLibraryA( vulkanLibraryName.c_str() );
+# else
+# error unsupported platform
+# endif
+ }
+ else
+ {
+# if defined( __linux__ )
+ m_library = dlopen( "libvulkan.so", RTLD_NOW | RTLD_LOCAL );
+ if ( m_library == nullptr )
+ {
+ m_library = dlopen( "libvulkan.so.1", RTLD_NOW | RTLD_LOCAL );
+ }
+# elif defined( __APPLE__ )
+ m_library = dlopen( "libvulkan.dylib", RTLD_NOW | RTLD_LOCAL );
+# elif defined( _WIN32 )
+ m_library = ::LoadLibraryA( "vulkan-1.dll" );
+# else
+# error unsupported platform
+# endif
+ }
- m_success = m_library != 0;
+ m_success = (m_library != nullptr);
#ifndef VULKAN_HPP_NO_EXCEPTIONS
if ( !m_success )
{
@@ -70054,93 +90097,160 @@ namespace VULKAN_HPP_NAMESPACE
#endif
}
+ DynamicLoader( DynamicLoader const& ) = delete;
+
+ DynamicLoader( DynamicLoader && other ) VULKAN_HPP_NOEXCEPT
+ : m_success(other.m_success)
+ , m_library(other.m_library)
+ {
+ other.m_library = nullptr;
+ }
+
+ DynamicLoader &operator=( DynamicLoader const& ) = delete;
+
+ DynamicLoader &operator=( DynamicLoader && other ) VULKAN_HPP_NOEXCEPT
+ {
+ m_success = other.m_success;
+ std::swap(m_library, other.m_library);
+ return *this;
+ }
+
~DynamicLoader() VULKAN_HPP_NOEXCEPT
{
if ( m_library )
{
-#if defined(__linux__) || defined(__APPLE__)
+# if defined( __linux__ ) || defined( __APPLE__ )
dlclose( m_library );
-#elif defined(_WIN32)
- FreeLibrary( m_library );
-#endif
+# elif defined( _WIN32 )
+ ::FreeLibrary( m_library );
+# else
+# error unsupported platform
+# endif
}
}
template <typename T>
T getProcAddress( const char* function ) const VULKAN_HPP_NOEXCEPT
{
-#if defined(__linux__) || defined(__APPLE__)
+# if defined( __linux__ ) || defined( __APPLE__ )
return (T)dlsym( m_library, function );
-#elif defined(_WIN32)
- return (T)GetProcAddress( m_library, function );
-#endif
+# elif defined( _WIN32 )
+ return (T)::GetProcAddress( m_library, function );
+# else
+# error unsupported platform
+# endif
}
bool success() const VULKAN_HPP_NOEXCEPT { return m_success; }
private:
bool m_success;
-#if defined(__linux__) || defined(__APPLE__)
- void *m_library;
-#elif defined(_WIN32)
- HMODULE m_library;
-#else
-#error unsupported platform
-#endif
+# if defined( __linux__ ) || defined( __APPLE__ )
+ void * m_library;
+# elif defined( _WIN32 )
+ ::HINSTANCE m_library;
+# else
+# error unsupported platform
+# endif
};
#endif
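The reworked DynamicLoader now accepts an optional library name, falls back to libvulkan.so.1 on Linux, and is movable but no longer copyable. A typical bootstrap, sketched here under the assumption that VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL is enabled, feeds it into the DispatchLoaderDynamic declared below:

    // Load the Vulkan loader library (or pass an explicit library name to the constructor).
    vk::DynamicLoader dl;
    auto vkGetInstanceProcAddr = dl.getProcAddress<PFN_vkGetInstanceProcAddr>( "vkGetInstanceProcAddr" );
    // Resolves the instance-independent entry points first; after vk::createInstance,
    // call dispatcher.init( instance ) to fill in the remaining function pointers.
    vk::DispatchLoaderDynamic dispatcher( vkGetInstanceProcAddr );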
+
class DispatchLoaderDynamic
{
public:
- PFN_vkCreateInstance vkCreateInstance = 0;
- PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties = 0;
- PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties = 0;
- PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion = 0;
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ PFN_vkAcquireFullScreenExclusiveModeEXT vkAcquireFullScreenExclusiveModeEXT = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR = 0;
+ PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR = 0;
+ PFN_vkAcquirePerformanceConfigurationINTEL vkAcquirePerformanceConfigurationINTEL = 0;
+ PFN_vkAcquireProfilingLockKHR vkAcquireProfilingLockKHR = 0;
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+ PFN_vkAcquireXlibDisplayEXT vkAcquireXlibDisplayEXT = 0;
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+ PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers = 0;
+ PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets = 0;
+ PFN_vkAllocateMemory vkAllocateMemory = 0;
PFN_vkBeginCommandBuffer vkBeginCommandBuffer = 0;
+ PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV = 0;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ PFN_vkBindAccelerationStructureMemoryKHR vkBindAccelerationStructureMemoryKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ PFN_vkBindBufferMemory vkBindBufferMemory = 0;
+ PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR = 0;
+ PFN_vkBindBufferMemory2 vkBindBufferMemory2 = 0;
+ PFN_vkBindImageMemory vkBindImageMemory = 0;
+ PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR = 0;
+ PFN_vkBindImageMemory2 vkBindImageMemory2 = 0;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ PFN_vkBuildAccelerationStructureKHR vkBuildAccelerationStructureKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
PFN_vkCmdBeginConditionalRenderingEXT vkCmdBeginConditionalRenderingEXT = 0;
PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT = 0;
PFN_vkCmdBeginQuery vkCmdBeginQuery = 0;
PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT = 0;
PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass = 0;
- PFN_vkCmdBeginRenderPass2 vkCmdBeginRenderPass2 = 0;
PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR = 0;
+ PFN_vkCmdBeginRenderPass2 vkCmdBeginRenderPass2 = 0;
PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT = 0;
PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets = 0;
PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer = 0;
PFN_vkCmdBindPipeline vkCmdBindPipeline = 0;
+ PFN_vkCmdBindPipelineShaderGroupNV vkCmdBindPipelineShaderGroupNV = 0;
PFN_vkCmdBindShadingRateImageNV vkCmdBindShadingRateImageNV = 0;
PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT = 0;
PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers = 0;
+ PFN_vkCmdBindVertexBuffers2EXT vkCmdBindVertexBuffers2EXT = 0;
PFN_vkCmdBlitImage vkCmdBlitImage = 0;
+ PFN_vkCmdBlitImage2KHR vkCmdBlitImage2KHR = 0;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ PFN_vkCmdBuildAccelerationStructureIndirectKHR vkCmdBuildAccelerationStructureIndirectKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ PFN_vkCmdBuildAccelerationStructureKHR vkCmdBuildAccelerationStructureKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV = 0;
PFN_vkCmdClearAttachments vkCmdClearAttachments = 0;
PFN_vkCmdClearColorImage vkCmdClearColorImage = 0;
PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage = 0;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ PFN_vkCmdCopyAccelerationStructureKHR vkCmdCopyAccelerationStructureKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
PFN_vkCmdCopyAccelerationStructureNV vkCmdCopyAccelerationStructureNV = 0;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ PFN_vkCmdCopyAccelerationStructureToMemoryKHR vkCmdCopyAccelerationStructureToMemoryKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
PFN_vkCmdCopyBuffer vkCmdCopyBuffer = 0;
+ PFN_vkCmdCopyBuffer2KHR vkCmdCopyBuffer2KHR = 0;
PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage = 0;
+ PFN_vkCmdCopyBufferToImage2KHR vkCmdCopyBufferToImage2KHR = 0;
PFN_vkCmdCopyImage vkCmdCopyImage = 0;
+ PFN_vkCmdCopyImage2KHR vkCmdCopyImage2KHR = 0;
PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer = 0;
+ PFN_vkCmdCopyImageToBuffer2KHR vkCmdCopyImageToBuffer2KHR = 0;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ PFN_vkCmdCopyMemoryToAccelerationStructureKHR vkCmdCopyMemoryToAccelerationStructureKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults = 0;
PFN_vkCmdDebugMarkerBeginEXT vkCmdDebugMarkerBeginEXT = 0;
PFN_vkCmdDebugMarkerEndEXT vkCmdDebugMarkerEndEXT = 0;
PFN_vkCmdDebugMarkerInsertEXT vkCmdDebugMarkerInsertEXT = 0;
PFN_vkCmdDispatch vkCmdDispatch = 0;
- PFN_vkCmdDispatchBase vkCmdDispatchBase = 0;
PFN_vkCmdDispatchBaseKHR vkCmdDispatchBaseKHR = 0;
+ PFN_vkCmdDispatchBase vkCmdDispatchBase = 0;
PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect = 0;
PFN_vkCmdDraw vkCmdDraw = 0;
PFN_vkCmdDrawIndexed vkCmdDrawIndexed = 0;
PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect = 0;
- PFN_vkCmdDrawIndexedIndirectCount vkCmdDrawIndexedIndirectCount = 0;
PFN_vkCmdDrawIndexedIndirectCountAMD vkCmdDrawIndexedIndirectCountAMD = 0;
PFN_vkCmdDrawIndexedIndirectCountKHR vkCmdDrawIndexedIndirectCountKHR = 0;
+ PFN_vkCmdDrawIndexedIndirectCount vkCmdDrawIndexedIndirectCount = 0;
PFN_vkCmdDrawIndirect vkCmdDrawIndirect = 0;
PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT = 0;
- PFN_vkCmdDrawIndirectCount vkCmdDrawIndirectCount = 0;
PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD = 0;
PFN_vkCmdDrawIndirectCountKHR vkCmdDrawIndirectCountKHR = 0;
+ PFN_vkCmdDrawIndirectCount vkCmdDrawIndirectCount = 0;
PFN_vkCmdDrawMeshTasksIndirectCountNV vkCmdDrawMeshTasksIndirectCountNV = 0;
PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV = 0;
PFN_vkCmdDrawMeshTasksNV vkCmdDrawMeshTasksNV = 0;
@@ -70149,185 +90259,295 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkCmdEndQuery vkCmdEndQuery = 0;
PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT = 0;
PFN_vkCmdEndRenderPass vkCmdEndRenderPass = 0;
- PFN_vkCmdEndRenderPass2 vkCmdEndRenderPass2 = 0;
PFN_vkCmdEndRenderPass2KHR vkCmdEndRenderPass2KHR = 0;
+ PFN_vkCmdEndRenderPass2 vkCmdEndRenderPass2 = 0;
PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT = 0;
PFN_vkCmdExecuteCommands vkCmdExecuteCommands = 0;
+ PFN_vkCmdExecuteGeneratedCommandsNV vkCmdExecuteGeneratedCommandsNV = 0;
PFN_vkCmdFillBuffer vkCmdFillBuffer = 0;
PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT = 0;
PFN_vkCmdNextSubpass vkCmdNextSubpass = 0;
- PFN_vkCmdNextSubpass2 vkCmdNextSubpass2 = 0;
PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR = 0;
+ PFN_vkCmdNextSubpass2 vkCmdNextSubpass2 = 0;
PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier = 0;
- PFN_vkCmdProcessCommandsNVX vkCmdProcessCommandsNVX = 0;
+ PFN_vkCmdPreprocessGeneratedCommandsNV vkCmdPreprocessGeneratedCommandsNV = 0;
PFN_vkCmdPushConstants vkCmdPushConstants = 0;
PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR = 0;
PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR = 0;
- PFN_vkCmdReserveSpaceForCommandsNVX vkCmdReserveSpaceForCommandsNVX = 0;
PFN_vkCmdResetEvent vkCmdResetEvent = 0;
PFN_vkCmdResetQueryPool vkCmdResetQueryPool = 0;
PFN_vkCmdResolveImage vkCmdResolveImage = 0;
+ PFN_vkCmdResolveImage2KHR vkCmdResolveImage2KHR = 0;
PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants = 0;
PFN_vkCmdSetCheckpointNV vkCmdSetCheckpointNV = 0;
PFN_vkCmdSetCoarseSampleOrderNV vkCmdSetCoarseSampleOrderNV = 0;
+ PFN_vkCmdSetCullModeEXT vkCmdSetCullModeEXT = 0;
PFN_vkCmdSetDepthBias vkCmdSetDepthBias = 0;
PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds = 0;
- PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask = 0;
+ PFN_vkCmdSetDepthBoundsTestEnableEXT vkCmdSetDepthBoundsTestEnableEXT = 0;
+ PFN_vkCmdSetDepthCompareOpEXT vkCmdSetDepthCompareOpEXT = 0;
+ PFN_vkCmdSetDepthTestEnableEXT vkCmdSetDepthTestEnableEXT = 0;
+ PFN_vkCmdSetDepthWriteEnableEXT vkCmdSetDepthWriteEnableEXT = 0;
PFN_vkCmdSetDeviceMaskKHR vkCmdSetDeviceMaskKHR = 0;
+ PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask = 0;
PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT = 0;
PFN_vkCmdSetEvent vkCmdSetEvent = 0;
PFN_vkCmdSetExclusiveScissorNV vkCmdSetExclusiveScissorNV = 0;
+ PFN_vkCmdSetFrontFaceEXT vkCmdSetFrontFaceEXT = 0;
PFN_vkCmdSetLineStippleEXT vkCmdSetLineStippleEXT = 0;
PFN_vkCmdSetLineWidth vkCmdSetLineWidth = 0;
PFN_vkCmdSetPerformanceMarkerINTEL vkCmdSetPerformanceMarkerINTEL = 0;
PFN_vkCmdSetPerformanceOverrideINTEL vkCmdSetPerformanceOverrideINTEL = 0;
PFN_vkCmdSetPerformanceStreamMarkerINTEL vkCmdSetPerformanceStreamMarkerINTEL = 0;
+ PFN_vkCmdSetPrimitiveTopologyEXT vkCmdSetPrimitiveTopologyEXT = 0;
PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT = 0;
PFN_vkCmdSetScissor vkCmdSetScissor = 0;
+ PFN_vkCmdSetScissorWithCountEXT vkCmdSetScissorWithCountEXT = 0;
PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask = 0;
+ PFN_vkCmdSetStencilOpEXT vkCmdSetStencilOpEXT = 0;
PFN_vkCmdSetStencilReference vkCmdSetStencilReference = 0;
+ PFN_vkCmdSetStencilTestEnableEXT vkCmdSetStencilTestEnableEXT = 0;
PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask = 0;
PFN_vkCmdSetViewport vkCmdSetViewport = 0;
PFN_vkCmdSetViewportShadingRatePaletteNV vkCmdSetViewportShadingRatePaletteNV = 0;
PFN_vkCmdSetViewportWScalingNV vkCmdSetViewportWScalingNV = 0;
+ PFN_vkCmdSetViewportWithCountEXT vkCmdSetViewportWithCountEXT = 0;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ PFN_vkCmdTraceRaysIndirectKHR vkCmdTraceRaysIndirectKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ PFN_vkCmdTraceRaysKHR vkCmdTraceRaysKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
PFN_vkCmdTraceRaysNV vkCmdTraceRaysNV = 0;
PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer = 0;
PFN_vkCmdWaitEvents vkCmdWaitEvents = 0;
PFN_vkCmdWriteAccelerationStructuresPropertiesNV vkCmdWriteAccelerationStructuresPropertiesNV = 0;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ PFN_vkCmdWriteAccelerationStructuresPropertiesKHR vkCmdWriteAccelerationStructuresPropertiesKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD = 0;
PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp = 0;
- PFN_vkEndCommandBuffer vkEndCommandBuffer = 0;
- PFN_vkResetCommandBuffer vkResetCommandBuffer = 0;
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- PFN_vkAcquireFullScreenExclusiveModeEXT vkAcquireFullScreenExclusiveModeEXT = 0;
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR = 0;
- PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR = 0;
- PFN_vkAcquirePerformanceConfigurationINTEL vkAcquirePerformanceConfigurationINTEL = 0;
- PFN_vkAcquireProfilingLockKHR vkAcquireProfilingLockKHR = 0;
- PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers = 0;
- PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets = 0;
- PFN_vkAllocateMemory vkAllocateMemory = 0;
- PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV = 0;
- PFN_vkBindBufferMemory vkBindBufferMemory = 0;
- PFN_vkBindBufferMemory2 vkBindBufferMemory2 = 0;
- PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR = 0;
- PFN_vkBindImageMemory vkBindImageMemory = 0;
- PFN_vkBindImageMemory2 vkBindImageMemory2 = 0;
- PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR = 0;
PFN_vkCompileDeferredNV vkCompileDeferredNV = 0;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ PFN_vkCopyAccelerationStructureKHR vkCopyAccelerationStructureKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ PFN_vkCopyAccelerationStructureToMemoryKHR vkCopyAccelerationStructureToMemoryKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ PFN_vkCopyMemoryToAccelerationStructureKHR vkCopyMemoryToAccelerationStructureKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ PFN_vkCreateAccelerationStructureKHR vkCreateAccelerationStructureKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV = 0;
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+ PFN_vkCreateAndroidSurfaceKHR vkCreateAndroidSurfaceKHR = 0;
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
PFN_vkCreateBuffer vkCreateBuffer = 0;
PFN_vkCreateBufferView vkCreateBufferView = 0;
PFN_vkCreateCommandPool vkCreateCommandPool = 0;
PFN_vkCreateComputePipelines vkCreateComputePipelines = 0;
+ PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT = 0;
+ PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT = 0;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ PFN_vkCreateDeferredOperationKHR vkCreateDeferredOperationKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
PFN_vkCreateDescriptorPool vkCreateDescriptorPool = 0;
PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout = 0;
- PFN_vkCreateDescriptorUpdateTemplate vkCreateDescriptorUpdateTemplate = 0;
PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR = 0;
+ PFN_vkCreateDescriptorUpdateTemplate vkCreateDescriptorUpdateTemplate = 0;
+ PFN_vkCreateDevice vkCreateDevice = 0;
+#ifdef VK_USE_PLATFORM_DIRECTFB_EXT
+ PFN_vkCreateDirectFBSurfaceEXT vkCreateDirectFBSurfaceEXT = 0;
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+ PFN_vkCreateDisplayModeKHR vkCreateDisplayModeKHR = 0;
+ PFN_vkCreateDisplayPlaneSurfaceKHR vkCreateDisplayPlaneSurfaceKHR = 0;
PFN_vkCreateEvent vkCreateEvent = 0;
PFN_vkCreateFence vkCreateFence = 0;
PFN_vkCreateFramebuffer vkCreateFramebuffer = 0;
PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines = 0;
+ PFN_vkCreateHeadlessSurfaceEXT vkCreateHeadlessSurfaceEXT = 0;
+#ifdef VK_USE_PLATFORM_IOS_MVK
+ PFN_vkCreateIOSSurfaceMVK vkCreateIOSSurfaceMVK = 0;
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
PFN_vkCreateImage vkCreateImage = 0;
+#ifdef VK_USE_PLATFORM_FUCHSIA
+ PFN_vkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIA = 0;
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
PFN_vkCreateImageView vkCreateImageView = 0;
- PFN_vkCreateIndirectCommandsLayoutNVX vkCreateIndirectCommandsLayoutNVX = 0;
- PFN_vkCreateObjectTableNVX vkCreateObjectTableNVX = 0;
+ PFN_vkCreateIndirectCommandsLayoutNV vkCreateIndirectCommandsLayoutNV = 0;
+ PFN_vkCreateInstance vkCreateInstance = 0;
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+ PFN_vkCreateMacOSSurfaceMVK vkCreateMacOSSurfaceMVK = 0;
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+#ifdef VK_USE_PLATFORM_METAL_EXT
+ PFN_vkCreateMetalSurfaceEXT vkCreateMetalSurfaceEXT = 0;
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
PFN_vkCreatePipelineCache vkCreatePipelineCache = 0;
PFN_vkCreatePipelineLayout vkCreatePipelineLayout = 0;
+ PFN_vkCreatePrivateDataSlotEXT vkCreatePrivateDataSlotEXT = 0;
PFN_vkCreateQueryPool vkCreateQueryPool = 0;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ PFN_vkCreateRayTracingPipelinesKHR vkCreateRayTracingPipelinesKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV = 0;
PFN_vkCreateRenderPass vkCreateRenderPass = 0;
- PFN_vkCreateRenderPass2 vkCreateRenderPass2 = 0;
PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR = 0;
+ PFN_vkCreateRenderPass2 vkCreateRenderPass2 = 0;
PFN_vkCreateSampler vkCreateSampler = 0;
- PFN_vkCreateSamplerYcbcrConversion vkCreateSamplerYcbcrConversion = 0;
PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR = 0;
+ PFN_vkCreateSamplerYcbcrConversion vkCreateSamplerYcbcrConversion = 0;
PFN_vkCreateSemaphore vkCreateSemaphore = 0;
PFN_vkCreateShaderModule vkCreateShaderModule = 0;
PFN_vkCreateSharedSwapchainsKHR vkCreateSharedSwapchainsKHR = 0;
+#ifdef VK_USE_PLATFORM_GGP
+ PFN_vkCreateStreamDescriptorSurfaceGGP vkCreateStreamDescriptorSurfaceGGP = 0;
+#endif /*VK_USE_PLATFORM_GGP*/
PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR = 0;
PFN_vkCreateValidationCacheEXT vkCreateValidationCacheEXT = 0;
+#ifdef VK_USE_PLATFORM_VI_NN
+ PFN_vkCreateViSurfaceNN vkCreateViSurfaceNN = 0;
+#endif /*VK_USE_PLATFORM_VI_NN*/
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+ PFN_vkCreateWaylandSurfaceKHR vkCreateWaylandSurfaceKHR = 0;
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ PFN_vkCreateWin32SurfaceKHR vkCreateWin32SurfaceKHR = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_XCB_KHR
+ PFN_vkCreateXcbSurfaceKHR vkCreateXcbSurfaceKHR = 0;
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+ PFN_vkCreateXlibSurfaceKHR vkCreateXlibSurfaceKHR = 0;
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
PFN_vkDebugMarkerSetObjectNameEXT vkDebugMarkerSetObjectNameEXT = 0;
PFN_vkDebugMarkerSetObjectTagEXT vkDebugMarkerSetObjectTagEXT = 0;
+ PFN_vkDebugReportMessageEXT vkDebugReportMessageEXT = 0;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ PFN_vkDeferredOperationJoinKHR vkDeferredOperationJoinKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
PFN_vkDestroyAccelerationStructureNV vkDestroyAccelerationStructureNV = 0;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ PFN_vkDestroyAccelerationStructureKHR vkDestroyAccelerationStructureKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
PFN_vkDestroyBuffer vkDestroyBuffer = 0;
PFN_vkDestroyBufferView vkDestroyBufferView = 0;
PFN_vkDestroyCommandPool vkDestroyCommandPool = 0;
+ PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = 0;
+ PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = 0;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ PFN_vkDestroyDeferredOperationKHR vkDestroyDeferredOperationKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool = 0;
PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout = 0;
- PFN_vkDestroyDescriptorUpdateTemplate vkDestroyDescriptorUpdateTemplate = 0;
PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR = 0;
+ PFN_vkDestroyDescriptorUpdateTemplate vkDestroyDescriptorUpdateTemplate = 0;
PFN_vkDestroyDevice vkDestroyDevice = 0;
PFN_vkDestroyEvent vkDestroyEvent = 0;
PFN_vkDestroyFence vkDestroyFence = 0;
PFN_vkDestroyFramebuffer vkDestroyFramebuffer = 0;
PFN_vkDestroyImage vkDestroyImage = 0;
PFN_vkDestroyImageView vkDestroyImageView = 0;
- PFN_vkDestroyIndirectCommandsLayoutNVX vkDestroyIndirectCommandsLayoutNVX = 0;
- PFN_vkDestroyObjectTableNVX vkDestroyObjectTableNVX = 0;
+ PFN_vkDestroyIndirectCommandsLayoutNV vkDestroyIndirectCommandsLayoutNV = 0;
+ PFN_vkDestroyInstance vkDestroyInstance = 0;
PFN_vkDestroyPipeline vkDestroyPipeline = 0;
PFN_vkDestroyPipelineCache vkDestroyPipelineCache = 0;
PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout = 0;
+ PFN_vkDestroyPrivateDataSlotEXT vkDestroyPrivateDataSlotEXT = 0;
PFN_vkDestroyQueryPool vkDestroyQueryPool = 0;
PFN_vkDestroyRenderPass vkDestroyRenderPass = 0;
PFN_vkDestroySampler vkDestroySampler = 0;
- PFN_vkDestroySamplerYcbcrConversion vkDestroySamplerYcbcrConversion = 0;
PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR = 0;
+ PFN_vkDestroySamplerYcbcrConversion vkDestroySamplerYcbcrConversion = 0;
PFN_vkDestroySemaphore vkDestroySemaphore = 0;
PFN_vkDestroyShaderModule vkDestroyShaderModule = 0;
+ PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR = 0;
PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR = 0;
PFN_vkDestroyValidationCacheEXT vkDestroyValidationCacheEXT = 0;
PFN_vkDeviceWaitIdle vkDeviceWaitIdle = 0;
PFN_vkDisplayPowerControlEXT vkDisplayPowerControlEXT = 0;
+ PFN_vkEndCommandBuffer vkEndCommandBuffer = 0;
+ PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties = 0;
+ PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties = 0;
+ PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties = 0;
+ PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties = 0;
+ PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion = 0;
+ PFN_vkEnumeratePhysicalDeviceGroupsKHR vkEnumeratePhysicalDeviceGroupsKHR = 0;
+ PFN_vkEnumeratePhysicalDeviceGroups vkEnumeratePhysicalDeviceGroups = 0;
+ PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = 0;
+ PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices = 0;
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges = 0;
PFN_vkFreeCommandBuffers vkFreeCommandBuffers = 0;
PFN_vkFreeDescriptorSets vkFreeDescriptorSets = 0;
PFN_vkFreeMemory vkFreeMemory = 0;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ PFN_vkGetAccelerationStructureDeviceAddressKHR vkGetAccelerationStructureDeviceAddressKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
PFN_vkGetAccelerationStructureHandleNV vkGetAccelerationStructureHandleNV = 0;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ PFN_vkGetAccelerationStructureMemoryRequirementsKHR vkGetAccelerationStructureMemoryRequirementsKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
PFN_vkGetAccelerationStructureMemoryRequirementsNV vkGetAccelerationStructureMemoryRequirementsNV = 0;
#ifdef VK_USE_PLATFORM_ANDROID_KHR
PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID = 0;
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
- PFN_vkGetBufferDeviceAddress vkGetBufferDeviceAddress = 0;
PFN_vkGetBufferDeviceAddressEXT vkGetBufferDeviceAddressEXT = 0;
PFN_vkGetBufferDeviceAddressKHR vkGetBufferDeviceAddressKHR = 0;
+ PFN_vkGetBufferDeviceAddress vkGetBufferDeviceAddress = 0;
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements = 0;
- PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2 = 0;
PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR = 0;
- PFN_vkGetBufferOpaqueCaptureAddress vkGetBufferOpaqueCaptureAddress = 0;
+ PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2 = 0;
PFN_vkGetBufferOpaqueCaptureAddressKHR vkGetBufferOpaqueCaptureAddressKHR = 0;
+ PFN_vkGetBufferOpaqueCaptureAddress vkGetBufferOpaqueCaptureAddress = 0;
PFN_vkGetCalibratedTimestampsEXT vkGetCalibratedTimestampsEXT = 0;
- PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport = 0;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ PFN_vkGetDeferredOperationMaxConcurrencyKHR vkGetDeferredOperationMaxConcurrencyKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ PFN_vkGetDeferredOperationResultKHR vkGetDeferredOperationResultKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
PFN_vkGetDescriptorSetLayoutSupportKHR vkGetDescriptorSetLayoutSupportKHR = 0;
- PFN_vkGetDeviceGroupPeerMemoryFeatures vkGetDeviceGroupPeerMemoryFeatures = 0;
+ PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport = 0;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ PFN_vkGetDeviceAccelerationStructureCompatibilityKHR vkGetDeviceAccelerationStructureCompatibilityKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR vkGetDeviceGroupPeerMemoryFeaturesKHR = 0;
+ PFN_vkGetDeviceGroupPeerMemoryFeatures vkGetDeviceGroupPeerMemoryFeatures = 0;
PFN_vkGetDeviceGroupPresentCapabilitiesKHR vkGetDeviceGroupPresentCapabilitiesKHR = 0;
#ifdef VK_USE_PLATFORM_WIN32_KHR
PFN_vkGetDeviceGroupSurfacePresentModes2EXT vkGetDeviceGroupSurfacePresentModes2EXT = 0;
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
PFN_vkGetDeviceGroupSurfacePresentModesKHR vkGetDeviceGroupSurfacePresentModesKHR = 0;
PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment = 0;
- PFN_vkGetDeviceMemoryOpaqueCaptureAddress vkGetDeviceMemoryOpaqueCaptureAddress = 0;
PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR vkGetDeviceMemoryOpaqueCaptureAddressKHR = 0;
+ PFN_vkGetDeviceMemoryOpaqueCaptureAddress vkGetDeviceMemoryOpaqueCaptureAddress = 0;
PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0;
PFN_vkGetDeviceQueue vkGetDeviceQueue = 0;
PFN_vkGetDeviceQueue2 vkGetDeviceQueue2 = 0;
+ PFN_vkGetDisplayModeProperties2KHR vkGetDisplayModeProperties2KHR = 0;
+ PFN_vkGetDisplayModePropertiesKHR vkGetDisplayModePropertiesKHR = 0;
+ PFN_vkGetDisplayPlaneCapabilities2KHR vkGetDisplayPlaneCapabilities2KHR = 0;
+ PFN_vkGetDisplayPlaneCapabilitiesKHR vkGetDisplayPlaneCapabilitiesKHR = 0;
+ PFN_vkGetDisplayPlaneSupportedDisplaysKHR vkGetDisplayPlaneSupportedDisplaysKHR = 0;
PFN_vkGetEventStatus vkGetEventStatus = 0;
PFN_vkGetFenceFdKHR vkGetFenceFdKHR = 0;
PFN_vkGetFenceStatus vkGetFenceStatus = 0;
#ifdef VK_USE_PLATFORM_WIN32_KHR
PFN_vkGetFenceWin32HandleKHR vkGetFenceWin32HandleKHR = 0;
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ PFN_vkGetGeneratedCommandsMemoryRequirementsNV vkGetGeneratedCommandsMemoryRequirementsNV = 0;
PFN_vkGetImageDrmFormatModifierPropertiesEXT vkGetImageDrmFormatModifierPropertiesEXT = 0;
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements = 0;
- PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2 = 0;
PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR = 0;
+ PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2 = 0;
PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements = 0;
- PFN_vkGetImageSparseMemoryRequirements2 vkGetImageSparseMemoryRequirements2 = 0;
PFN_vkGetImageSparseMemoryRequirements2KHR vkGetImageSparseMemoryRequirements2KHR = 0;
+ PFN_vkGetImageSparseMemoryRequirements2 vkGetImageSparseMemoryRequirements2 = 0;
PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout = 0;
+ PFN_vkGetImageViewAddressNVX vkGetImageViewAddressNVX = 0;
PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX = 0;
+ PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0;
#ifdef VK_USE_PLATFORM_ANDROID_KHR
PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID = 0;
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
@@ -70345,168 +90565,46 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE = 0;
PFN_vkGetPerformanceParameterINTEL vkGetPerformanceParameterINTEL = 0;
- PFN_vkGetPipelineCacheData vkGetPipelineCacheData = 0;
- PFN_vkGetPipelineExecutableInternalRepresentationsKHR vkGetPipelineExecutableInternalRepresentationsKHR = 0;
- PFN_vkGetPipelineExecutablePropertiesKHR vkGetPipelineExecutablePropertiesKHR = 0;
- PFN_vkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHR = 0;
- PFN_vkGetQueryPoolResults vkGetQueryPoolResults = 0;
- PFN_vkGetRayTracingShaderGroupHandlesNV vkGetRayTracingShaderGroupHandlesNV = 0;
- PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE = 0;
- PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity = 0;
- PFN_vkGetSemaphoreCounterValue vkGetSemaphoreCounterValue = 0;
- PFN_vkGetSemaphoreCounterValueKHR vkGetSemaphoreCounterValueKHR = 0;
- PFN_vkGetSemaphoreFdKHR vkGetSemaphoreFdKHR = 0;
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- PFN_vkGetSemaphoreWin32HandleKHR vkGetSemaphoreWin32HandleKHR = 0;
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- PFN_vkGetShaderInfoAMD vkGetShaderInfoAMD = 0;
- PFN_vkGetSwapchainCounterEXT vkGetSwapchainCounterEXT = 0;
- PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR = 0;
- PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR = 0;
- PFN_vkGetValidationCacheDataEXT vkGetValidationCacheDataEXT = 0;
- PFN_vkImportFenceFdKHR vkImportFenceFdKHR = 0;
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- PFN_vkImportFenceWin32HandleKHR vkImportFenceWin32HandleKHR = 0;
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR = 0;
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- PFN_vkImportSemaphoreWin32HandleKHR vkImportSemaphoreWin32HandleKHR = 0;
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- PFN_vkInitializePerformanceApiINTEL vkInitializePerformanceApiINTEL = 0;
- PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges = 0;
- PFN_vkMapMemory vkMapMemory = 0;
- PFN_vkMergePipelineCaches vkMergePipelineCaches = 0;
- PFN_vkMergeValidationCachesEXT vkMergeValidationCachesEXT = 0;
- PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT = 0;
- PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT = 0;
- PFN_vkRegisterObjectsNVX vkRegisterObjectsNVX = 0;
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- PFN_vkReleaseFullScreenExclusiveModeEXT vkReleaseFullScreenExclusiveModeEXT = 0;
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- PFN_vkReleasePerformanceConfigurationINTEL vkReleasePerformanceConfigurationINTEL = 0;
- PFN_vkReleaseProfilingLockKHR vkReleaseProfilingLockKHR = 0;
- PFN_vkResetCommandPool vkResetCommandPool = 0;
- PFN_vkResetDescriptorPool vkResetDescriptorPool = 0;
- PFN_vkResetEvent vkResetEvent = 0;
- PFN_vkResetFences vkResetFences = 0;
- PFN_vkResetQueryPool vkResetQueryPool = 0;
- PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT = 0;
- PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT = 0;
- PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT = 0;
- PFN_vkSetEvent vkSetEvent = 0;
- PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT = 0;
- PFN_vkSetLocalDimmingAMD vkSetLocalDimmingAMD = 0;
- PFN_vkSignalSemaphore vkSignalSemaphore = 0;
- PFN_vkSignalSemaphoreKHR vkSignalSemaphoreKHR = 0;
- PFN_vkTrimCommandPool vkTrimCommandPool = 0;
- PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR = 0;
- PFN_vkUninitializePerformanceApiINTEL vkUninitializePerformanceApiINTEL = 0;
- PFN_vkUnmapMemory vkUnmapMemory = 0;
- PFN_vkUnregisterObjectsNVX vkUnregisterObjectsNVX = 0;
- PFN_vkUpdateDescriptorSetWithTemplate vkUpdateDescriptorSetWithTemplate = 0;
- PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR = 0;
- PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets = 0;
- PFN_vkWaitForFences vkWaitForFences = 0;
- PFN_vkWaitSemaphores vkWaitSemaphores = 0;
- PFN_vkWaitSemaphoresKHR vkWaitSemaphoresKHR = 0;
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
- PFN_vkCreateAndroidSurfaceKHR vkCreateAndroidSurfaceKHR = 0;
-#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
- PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT = 0;
- PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT = 0;
- PFN_vkCreateDisplayPlaneSurfaceKHR vkCreateDisplayPlaneSurfaceKHR = 0;
- PFN_vkCreateHeadlessSurfaceEXT vkCreateHeadlessSurfaceEXT = 0;
-#ifdef VK_USE_PLATFORM_IOS_MVK
- PFN_vkCreateIOSSurfaceMVK vkCreateIOSSurfaceMVK = 0;
-#endif /*VK_USE_PLATFORM_IOS_MVK*/
-#ifdef VK_USE_PLATFORM_FUCHSIA
- PFN_vkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIA = 0;
-#endif /*VK_USE_PLATFORM_FUCHSIA*/
-#ifdef VK_USE_PLATFORM_MACOS_MVK
- PFN_vkCreateMacOSSurfaceMVK vkCreateMacOSSurfaceMVK = 0;
-#endif /*VK_USE_PLATFORM_MACOS_MVK*/
-#ifdef VK_USE_PLATFORM_METAL_EXT
- PFN_vkCreateMetalSurfaceEXT vkCreateMetalSurfaceEXT = 0;
-#endif /*VK_USE_PLATFORM_METAL_EXT*/
-#ifdef VK_USE_PLATFORM_GGP
- PFN_vkCreateStreamDescriptorSurfaceGGP vkCreateStreamDescriptorSurfaceGGP = 0;
-#endif /*VK_USE_PLATFORM_GGP*/
-#ifdef VK_USE_PLATFORM_VI_NN
- PFN_vkCreateViSurfaceNN vkCreateViSurfaceNN = 0;
-#endif /*VK_USE_PLATFORM_VI_NN*/
-#ifdef VK_USE_PLATFORM_WAYLAND_KHR
- PFN_vkCreateWaylandSurfaceKHR vkCreateWaylandSurfaceKHR = 0;
-#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- PFN_vkCreateWin32SurfaceKHR vkCreateWin32SurfaceKHR = 0;
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-#ifdef VK_USE_PLATFORM_XCB_KHR
- PFN_vkCreateXcbSurfaceKHR vkCreateXcbSurfaceKHR = 0;
-#endif /*VK_USE_PLATFORM_XCB_KHR*/
-#ifdef VK_USE_PLATFORM_XLIB_KHR
- PFN_vkCreateXlibSurfaceKHR vkCreateXlibSurfaceKHR = 0;
-#endif /*VK_USE_PLATFORM_XLIB_KHR*/
- PFN_vkDebugReportMessageEXT vkDebugReportMessageEXT = 0;
- PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = 0;
- PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = 0;
- PFN_vkDestroyInstance vkDestroyInstance = 0;
- PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR = 0;
- PFN_vkEnumeratePhysicalDeviceGroups vkEnumeratePhysicalDeviceGroups = 0;
- PFN_vkEnumeratePhysicalDeviceGroupsKHR vkEnumeratePhysicalDeviceGroupsKHR = 0;
- PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices = 0;
- PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0;
- PFN_vkSubmitDebugUtilsMessageEXT vkSubmitDebugUtilsMessageEXT = 0;
-#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
- PFN_vkAcquireXlibDisplayEXT vkAcquireXlibDisplayEXT = 0;
-#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
- PFN_vkCreateDevice vkCreateDevice = 0;
- PFN_vkCreateDisplayModeKHR vkCreateDisplayModeKHR = 0;
- PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties = 0;
- PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties = 0;
- PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = 0;
- PFN_vkGetDisplayModeProperties2KHR vkGetDisplayModeProperties2KHR = 0;
- PFN_vkGetDisplayModePropertiesKHR vkGetDisplayModePropertiesKHR = 0;
- PFN_vkGetDisplayPlaneCapabilities2KHR vkGetDisplayPlaneCapabilities2KHR = 0;
- PFN_vkGetDisplayPlaneCapabilitiesKHR vkGetDisplayPlaneCapabilitiesKHR = 0;
- PFN_vkGetDisplayPlaneSupportedDisplaysKHR vkGetDisplayPlaneSupportedDisplaysKHR = 0;
PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = 0;
PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = 0;
+#ifdef VK_USE_PLATFORM_DIRECTFB_EXT
+ PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT vkGetPhysicalDeviceDirectFBPresentationSupportEXT = 0;
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR vkGetPhysicalDeviceDisplayPlaneProperties2KHR = 0;
PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR vkGetPhysicalDeviceDisplayPlanePropertiesKHR = 0;
PFN_vkGetPhysicalDeviceDisplayProperties2KHR vkGetPhysicalDeviceDisplayProperties2KHR = 0;
PFN_vkGetPhysicalDeviceDisplayPropertiesKHR vkGetPhysicalDeviceDisplayPropertiesKHR = 0;
- PFN_vkGetPhysicalDeviceExternalBufferProperties vkGetPhysicalDeviceExternalBufferProperties = 0;
PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR vkGetPhysicalDeviceExternalBufferPropertiesKHR = 0;
- PFN_vkGetPhysicalDeviceExternalFenceProperties vkGetPhysicalDeviceExternalFenceProperties = 0;
+ PFN_vkGetPhysicalDeviceExternalBufferProperties vkGetPhysicalDeviceExternalBufferProperties = 0;
PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR = 0;
+ PFN_vkGetPhysicalDeviceExternalFenceProperties vkGetPhysicalDeviceExternalFenceProperties = 0;
PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV vkGetPhysicalDeviceExternalImageFormatPropertiesNV = 0;
- PFN_vkGetPhysicalDeviceExternalSemaphoreProperties vkGetPhysicalDeviceExternalSemaphoreProperties = 0;
PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = 0;
+ PFN_vkGetPhysicalDeviceExternalSemaphoreProperties vkGetPhysicalDeviceExternalSemaphoreProperties = 0;
PFN_vkGetPhysicalDeviceFeatures vkGetPhysicalDeviceFeatures = 0;
- PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2 = 0;
PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR = 0;
+ PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2 = 0;
PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties = 0;
- PFN_vkGetPhysicalDeviceFormatProperties2 vkGetPhysicalDeviceFormatProperties2 = 0;
PFN_vkGetPhysicalDeviceFormatProperties2KHR vkGetPhysicalDeviceFormatProperties2KHR = 0;
- PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX = 0;
+ PFN_vkGetPhysicalDeviceFormatProperties2 vkGetPhysicalDeviceFormatProperties2 = 0;
PFN_vkGetPhysicalDeviceImageFormatProperties vkGetPhysicalDeviceImageFormatProperties = 0;
- PFN_vkGetPhysicalDeviceImageFormatProperties2 vkGetPhysicalDeviceImageFormatProperties2 = 0;
PFN_vkGetPhysicalDeviceImageFormatProperties2KHR vkGetPhysicalDeviceImageFormatProperties2KHR = 0;
+ PFN_vkGetPhysicalDeviceImageFormatProperties2 vkGetPhysicalDeviceImageFormatProperties2 = 0;
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties = 0;
- PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2 = 0;
PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR = 0;
+ PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2 = 0;
PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT = 0;
PFN_vkGetPhysicalDevicePresentRectanglesKHR vkGetPhysicalDevicePresentRectanglesKHR = 0;
PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties = 0;
- PFN_vkGetPhysicalDeviceProperties2 vkGetPhysicalDeviceProperties2 = 0;
PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR = 0;
+ PFN_vkGetPhysicalDeviceProperties2 vkGetPhysicalDeviceProperties2 = 0;
PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = 0;
PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties = 0;
- PFN_vkGetPhysicalDeviceQueueFamilyProperties2 vkGetPhysicalDeviceQueueFamilyProperties2 = 0;
PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR vkGetPhysicalDeviceQueueFamilyProperties2KHR = 0;
+ PFN_vkGetPhysicalDeviceQueueFamilyProperties2 vkGetPhysicalDeviceQueueFamilyProperties2 = 0;
PFN_vkGetPhysicalDeviceSparseImageFormatProperties vkGetPhysicalDeviceSparseImageFormatProperties = 0;
- PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 vkGetPhysicalDeviceSparseImageFormatProperties2 = 0;
PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR vkGetPhysicalDeviceSparseImageFormatProperties2KHR = 0;
+ PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 vkGetPhysicalDeviceSparseImageFormatProperties2 = 0;
PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = 0;
PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT vkGetPhysicalDeviceSurfaceCapabilities2EXT = 0;
PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR vkGetPhysicalDeviceSurfaceCapabilities2KHR = 0;
@@ -70531,11 +90629,49 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_XLIB_KHR
PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR vkGetPhysicalDeviceXlibPresentationSupportKHR = 0;
#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+ PFN_vkGetPipelineCacheData vkGetPipelineCacheData = 0;
+ PFN_vkGetPipelineExecutableInternalRepresentationsKHR vkGetPipelineExecutableInternalRepresentationsKHR = 0;
+ PFN_vkGetPipelineExecutablePropertiesKHR vkGetPipelineExecutablePropertiesKHR = 0;
+ PFN_vkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHR = 0;
+ PFN_vkGetPrivateDataEXT vkGetPrivateDataEXT = 0;
+ PFN_vkGetQueryPoolResults vkGetQueryPoolResults = 0;
+ PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV = 0;
#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
PFN_vkGetRandROutputDisplayEXT vkGetRandROutputDisplayEXT = 0;
#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
- PFN_vkReleaseDisplayEXT vkReleaseDisplayEXT = 0;
- PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV = 0;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ PFN_vkGetRayTracingShaderGroupHandlesNV vkGetRayTracingShaderGroupHandlesNV = 0;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ PFN_vkGetRayTracingShaderGroupHandlesKHR vkGetRayTracingShaderGroupHandlesKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE = 0;
+ PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity = 0;
+ PFN_vkGetSemaphoreCounterValueKHR vkGetSemaphoreCounterValueKHR = 0;
+ PFN_vkGetSemaphoreCounterValue vkGetSemaphoreCounterValue = 0;
+ PFN_vkGetSemaphoreFdKHR vkGetSemaphoreFdKHR = 0;
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ PFN_vkGetSemaphoreWin32HandleKHR vkGetSemaphoreWin32HandleKHR = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ PFN_vkGetShaderInfoAMD vkGetShaderInfoAMD = 0;
+ PFN_vkGetSwapchainCounterEXT vkGetSwapchainCounterEXT = 0;
+ PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR = 0;
+ PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR = 0;
+ PFN_vkGetValidationCacheDataEXT vkGetValidationCacheDataEXT = 0;
+ PFN_vkImportFenceFdKHR vkImportFenceFdKHR = 0;
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ PFN_vkImportFenceWin32HandleKHR vkImportFenceWin32HandleKHR = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR = 0;
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ PFN_vkImportSemaphoreWin32HandleKHR vkImportSemaphoreWin32HandleKHR = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ PFN_vkInitializePerformanceApiINTEL vkInitializePerformanceApiINTEL = 0;
+ PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges = 0;
+ PFN_vkMapMemory vkMapMemory = 0;
+ PFN_vkMergePipelineCaches vkMergePipelineCaches = 0;
+ PFN_vkMergeValidationCachesEXT vkMergeValidationCachesEXT = 0;
PFN_vkQueueBeginDebugUtilsLabelEXT vkQueueBeginDebugUtilsLabelEXT = 0;
PFN_vkQueueBindSparse vkQueueBindSparse = 0;
PFN_vkQueueEndDebugUtilsLabelEXT vkQueueEndDebugUtilsLabelEXT = 0;
@@ -70544,24 +90680,67 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkQueueSetPerformanceConfigurationINTEL vkQueueSetPerformanceConfigurationINTEL = 0;
PFN_vkQueueSubmit vkQueueSubmit = 0;
PFN_vkQueueWaitIdle vkQueueWaitIdle = 0;
+ PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT = 0;
+ PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT = 0;
+ PFN_vkReleaseDisplayEXT vkReleaseDisplayEXT = 0;
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ PFN_vkReleaseFullScreenExclusiveModeEXT vkReleaseFullScreenExclusiveModeEXT = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ PFN_vkReleasePerformanceConfigurationINTEL vkReleasePerformanceConfigurationINTEL = 0;
+ PFN_vkReleaseProfilingLockKHR vkReleaseProfilingLockKHR = 0;
+ PFN_vkResetCommandBuffer vkResetCommandBuffer = 0;
+ PFN_vkResetCommandPool vkResetCommandPool = 0;
+ PFN_vkResetDescriptorPool vkResetDescriptorPool = 0;
+ PFN_vkResetEvent vkResetEvent = 0;
+ PFN_vkResetFences vkResetFences = 0;
+ PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT = 0;
+ PFN_vkResetQueryPool vkResetQueryPool = 0;
+ PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT = 0;
+ PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT = 0;
+ PFN_vkSetEvent vkSetEvent = 0;
+ PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT = 0;
+ PFN_vkSetLocalDimmingAMD vkSetLocalDimmingAMD = 0;
+ PFN_vkSetPrivateDataEXT vkSetPrivateDataEXT = 0;
+ PFN_vkSignalSemaphoreKHR vkSignalSemaphoreKHR = 0;
+ PFN_vkSignalSemaphore vkSignalSemaphore = 0;
+ PFN_vkSubmitDebugUtilsMessageEXT vkSubmitDebugUtilsMessageEXT = 0;
+ PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR = 0;
+ PFN_vkTrimCommandPool vkTrimCommandPool = 0;
+ PFN_vkUninitializePerformanceApiINTEL vkUninitializePerformanceApiINTEL = 0;
+ PFN_vkUnmapMemory vkUnmapMemory = 0;
+ PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR = 0;
+ PFN_vkUpdateDescriptorSetWithTemplate vkUpdateDescriptorSetWithTemplate = 0;
+ PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets = 0;
+ PFN_vkWaitForFences vkWaitForFences = 0;
+ PFN_vkWaitSemaphoresKHR vkWaitSemaphoresKHR = 0;
+ PFN_vkWaitSemaphores vkWaitSemaphores = 0;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ PFN_vkWriteAccelerationStructuresPropertiesKHR vkWriteAccelerationStructuresPropertiesKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
public:
DispatchLoaderDynamic() VULKAN_HPP_NOEXCEPT = default;
#if !defined(VK_NO_PROTOTYPES)
// This interface is designed to be used for per-device function pointers in combination with a linked vulkan library.
- DispatchLoaderDynamic(VULKAN_HPP_NAMESPACE::Instance const& instance, VULKAN_HPP_NAMESPACE::Device const& device) VULKAN_HPP_NOEXCEPT
+ template <typename DynamicLoader>
+ void init(VULKAN_HPP_NAMESPACE::Instance const& instance, VULKAN_HPP_NAMESPACE::Device const& device, DynamicLoader const& dl) VULKAN_HPP_NOEXCEPT
{
- init(instance, device);
+ PFN_vkGetInstanceProcAddr getInstanceProcAddr = dl.template getProcAddress<PFN_vkGetInstanceProcAddr>("vkGetInstanceProcAddr");
+ PFN_vkGetDeviceProcAddr getDeviceProcAddr = dl.template getProcAddress<PFN_vkGetDeviceProcAddr>("vkGetDeviceProcAddr");
+ init(static_cast<VkInstance>(instance), getInstanceProcAddr, static_cast<VkDevice>(device), device ? getDeviceProcAddr : nullptr);
}
// This interface is designed to be used for per-device function pointers in combination with a linked vulkan library.
+ template <typename DynamicLoader
+#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
+ = VULKAN_HPP_NAMESPACE::DynamicLoader
+#endif
+ >
void init(VULKAN_HPP_NAMESPACE::Instance const& instance, VULKAN_HPP_NAMESPACE::Device const& device) VULKAN_HPP_NOEXCEPT
{
- static vk::DynamicLoader dl;
- PFN_vkGetInstanceProcAddr getInstanceProcAddr = dl.getProcAddress<PFN_vkGetInstanceProcAddr>("vkGetInstanceProcAddr");
- PFN_vkGetDeviceProcAddr getDeviceProcAddr = dl.getProcAddress<PFN_vkGetDeviceProcAddr>("vkGetDeviceProcAddr");
- init(static_cast<VkInstance>(instance), getInstanceProcAddr, static_cast<VkDevice>(device), device ? getDeviceProcAddr : nullptr);
+ static DynamicLoader dl;
+ init(instance, device, dl);
}
#endif // !defined(VK_NO_PROTOTYPES)
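The hunk above replaces the old (instance, device) constructor with templated init overloads: one taking any loader object that exposes a getProcAddress<PFN>() template member, and one that default-constructs such a loader (defaulting to VULKAN_HPP_NAMESPACE::DynamicLoader when VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL is enabled). A minimal, hypothetical usage sketch follows, assuming vulkan.hpp with the dynamic-loader tool enabled and an already created vk::Instance and vk::Device (function and variable names are assumptions, not code from the header):

#include <vulkan/vulkan.hpp>

void initDispatcher( vk::Instance instance, vk::Device device )
{
  vk::DispatchLoaderDynamic dispatcher;

  // One-step init: a static DynamicLoader resolves vkGetInstanceProcAddr /
  // vkGetDeviceProcAddr, then every function pointer member is filled in
  // (device-level entry points only when 'device' is non-null).
  dispatcher.init( instance, device );

  // Or hand in an explicit loader object; any type with a
  // getProcAddress<PFN>( const char * ) template member works here.
  vk::DynamicLoader dl;
  dispatcher.init( instance, device, dl );

  // The populated dispatcher is then passed to the C++ bindings, e.g.
  // device.waitIdle( dispatcher );
}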
@@ -70601,11 +90780,19 @@ namespace VULKAN_HPP_NAMESPACE
void init( VULKAN_HPP_NAMESPACE::Instance instanceCpp ) VULKAN_HPP_NOEXCEPT
{
VkInstance instance = static_cast<VkInstance>(instanceCpp);
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+ vkAcquireXlibDisplayEXT = PFN_vkAcquireXlibDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireXlibDisplayEXT" ) );
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
#ifdef VK_USE_PLATFORM_ANDROID_KHR
vkCreateAndroidSurfaceKHR = PFN_vkCreateAndroidSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateAndroidSurfaceKHR" ) );
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
vkCreateDebugReportCallbackEXT = PFN_vkCreateDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugReportCallbackEXT" ) );
vkCreateDebugUtilsMessengerEXT = PFN_vkCreateDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugUtilsMessengerEXT" ) );
+ vkCreateDevice = PFN_vkCreateDevice( vkGetInstanceProcAddr( instance, "vkCreateDevice" ) );
+#ifdef VK_USE_PLATFORM_DIRECTFB_EXT
+ vkCreateDirectFBSurfaceEXT = PFN_vkCreateDirectFBSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateDirectFBSurfaceEXT" ) );
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+ vkCreateDisplayModeKHR = PFN_vkCreateDisplayModeKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayModeKHR" ) );
vkCreateDisplayPlaneSurfaceKHR = PFN_vkCreateDisplayPlaneSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayPlaneSurfaceKHR" ) );
vkCreateHeadlessSurfaceEXT = PFN_vkCreateHeadlessSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateHeadlessSurfaceEXT" ) );
#ifdef VK_USE_PLATFORM_IOS_MVK
@@ -70643,61 +90830,69 @@ namespace VULKAN_HPP_NAMESPACE
vkDestroyDebugUtilsMessengerEXT = PFN_vkDestroyDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugUtilsMessengerEXT" ) );
vkDestroyInstance = PFN_vkDestroyInstance( vkGetInstanceProcAddr( instance, "vkDestroyInstance" ) );
vkDestroySurfaceKHR = PFN_vkDestroySurfaceKHR( vkGetInstanceProcAddr( instance, "vkDestroySurfaceKHR" ) );
- vkEnumeratePhysicalDeviceGroups = PFN_vkEnumeratePhysicalDeviceGroups( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroups" ) );
- vkEnumeratePhysicalDeviceGroupsKHR = PFN_vkEnumeratePhysicalDeviceGroupsKHR( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroupsKHR" ) );
- vkEnumeratePhysicalDevices = PFN_vkEnumeratePhysicalDevices( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDevices" ) );
- vkSubmitDebugUtilsMessageEXT = PFN_vkSubmitDebugUtilsMessageEXT( vkGetInstanceProcAddr( instance, "vkSubmitDebugUtilsMessageEXT" ) );
-#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
- vkAcquireXlibDisplayEXT = PFN_vkAcquireXlibDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireXlibDisplayEXT" ) );
-#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
- vkCreateDevice = PFN_vkCreateDevice( vkGetInstanceProcAddr( instance, "vkCreateDevice" ) );
- vkCreateDisplayModeKHR = PFN_vkCreateDisplayModeKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayModeKHR" ) );
vkEnumerateDeviceExtensionProperties = PFN_vkEnumerateDeviceExtensionProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceExtensionProperties" ) );
vkEnumerateDeviceLayerProperties = PFN_vkEnumerateDeviceLayerProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceLayerProperties" ) );
+ vkEnumeratePhysicalDeviceGroupsKHR = PFN_vkEnumeratePhysicalDeviceGroupsKHR( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroupsKHR" ) );
+ vkEnumeratePhysicalDeviceGroups = PFN_vkEnumeratePhysicalDeviceGroups( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroups" ) );
+ if ( !vkEnumeratePhysicalDeviceGroups ) vkEnumeratePhysicalDeviceGroups = vkEnumeratePhysicalDeviceGroupsKHR;
vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR" ) );
+ vkEnumeratePhysicalDevices = PFN_vkEnumeratePhysicalDevices( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDevices" ) );
vkGetDisplayModeProperties2KHR = PFN_vkGetDisplayModeProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModeProperties2KHR" ) );
vkGetDisplayModePropertiesKHR = PFN_vkGetDisplayModePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModePropertiesKHR" ) );
vkGetDisplayPlaneCapabilities2KHR = PFN_vkGetDisplayPlaneCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilities2KHR" ) );
vkGetDisplayPlaneCapabilitiesKHR = PFN_vkGetDisplayPlaneCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilitiesKHR" ) );
vkGetDisplayPlaneSupportedDisplaysKHR = PFN_vkGetDisplayPlaneSupportedDisplaysKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneSupportedDisplaysKHR" ) );
+ vkGetInstanceProcAddr = PFN_vkGetInstanceProcAddr( vkGetInstanceProcAddr( instance, "vkGetInstanceProcAddr" ) );
vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT" ) );
vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV" ) );
+#ifdef VK_USE_PLATFORM_DIRECTFB_EXT
+ vkGetPhysicalDeviceDirectFBPresentationSupportEXT = PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDirectFBPresentationSupportEXT" ) );
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
vkGetPhysicalDeviceDisplayPlaneProperties2KHR = PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR" ) );
vkGetPhysicalDeviceDisplayPlanePropertiesKHR = PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR" ) );
vkGetPhysicalDeviceDisplayProperties2KHR = PFN_vkGetPhysicalDeviceDisplayProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayProperties2KHR" ) );
vkGetPhysicalDeviceDisplayPropertiesKHR = PFN_vkGetPhysicalDeviceDisplayPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPropertiesKHR" ) );
- vkGetPhysicalDeviceExternalBufferProperties = PFN_vkGetPhysicalDeviceExternalBufferProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferProperties" ) );
vkGetPhysicalDeviceExternalBufferPropertiesKHR = PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferPropertiesKHR" ) );
- vkGetPhysicalDeviceExternalFenceProperties = PFN_vkGetPhysicalDeviceExternalFenceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFenceProperties" ) );
+ vkGetPhysicalDeviceExternalBufferProperties = PFN_vkGetPhysicalDeviceExternalBufferProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferProperties" ) );
+ if ( !vkGetPhysicalDeviceExternalBufferProperties ) vkGetPhysicalDeviceExternalBufferProperties = vkGetPhysicalDeviceExternalBufferPropertiesKHR;
vkGetPhysicalDeviceExternalFencePropertiesKHR = PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFencePropertiesKHR" ) );
+ vkGetPhysicalDeviceExternalFenceProperties = PFN_vkGetPhysicalDeviceExternalFenceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFenceProperties" ) );
+ if ( !vkGetPhysicalDeviceExternalFenceProperties ) vkGetPhysicalDeviceExternalFenceProperties = vkGetPhysicalDeviceExternalFencePropertiesKHR;
vkGetPhysicalDeviceExternalImageFormatPropertiesNV = PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV" ) );
- vkGetPhysicalDeviceExternalSemaphoreProperties = PFN_vkGetPhysicalDeviceExternalSemaphoreProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphoreProperties" ) );
vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR" ) );
+ vkGetPhysicalDeviceExternalSemaphoreProperties = PFN_vkGetPhysicalDeviceExternalSemaphoreProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphoreProperties" ) );
+ if ( !vkGetPhysicalDeviceExternalSemaphoreProperties ) vkGetPhysicalDeviceExternalSemaphoreProperties = vkGetPhysicalDeviceExternalSemaphorePropertiesKHR;
vkGetPhysicalDeviceFeatures = PFN_vkGetPhysicalDeviceFeatures( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures" ) );
- vkGetPhysicalDeviceFeatures2 = PFN_vkGetPhysicalDeviceFeatures2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2" ) );
vkGetPhysicalDeviceFeatures2KHR = PFN_vkGetPhysicalDeviceFeatures2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2KHR" ) );
+ vkGetPhysicalDeviceFeatures2 = PFN_vkGetPhysicalDeviceFeatures2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2" ) );
+ if ( !vkGetPhysicalDeviceFeatures2 ) vkGetPhysicalDeviceFeatures2 = vkGetPhysicalDeviceFeatures2KHR;
vkGetPhysicalDeviceFormatProperties = PFN_vkGetPhysicalDeviceFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties" ) );
- vkGetPhysicalDeviceFormatProperties2 = PFN_vkGetPhysicalDeviceFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2" ) );
vkGetPhysicalDeviceFormatProperties2KHR = PFN_vkGetPhysicalDeviceFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2KHR" ) );
- vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX = PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX" ) );
+ vkGetPhysicalDeviceFormatProperties2 = PFN_vkGetPhysicalDeviceFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2" ) );
+ if ( !vkGetPhysicalDeviceFormatProperties2 ) vkGetPhysicalDeviceFormatProperties2 = vkGetPhysicalDeviceFormatProperties2KHR;
vkGetPhysicalDeviceImageFormatProperties = PFN_vkGetPhysicalDeviceImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties" ) );
- vkGetPhysicalDeviceImageFormatProperties2 = PFN_vkGetPhysicalDeviceImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2" ) );
vkGetPhysicalDeviceImageFormatProperties2KHR = PFN_vkGetPhysicalDeviceImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2KHR" ) );
+ vkGetPhysicalDeviceImageFormatProperties2 = PFN_vkGetPhysicalDeviceImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2" ) );
+ if ( !vkGetPhysicalDeviceImageFormatProperties2 ) vkGetPhysicalDeviceImageFormatProperties2 = vkGetPhysicalDeviceImageFormatProperties2KHR;
vkGetPhysicalDeviceMemoryProperties = PFN_vkGetPhysicalDeviceMemoryProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties" ) );
- vkGetPhysicalDeviceMemoryProperties2 = PFN_vkGetPhysicalDeviceMemoryProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2" ) );
vkGetPhysicalDeviceMemoryProperties2KHR = PFN_vkGetPhysicalDeviceMemoryProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2KHR" ) );
+ vkGetPhysicalDeviceMemoryProperties2 = PFN_vkGetPhysicalDeviceMemoryProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2" ) );
+ if ( !vkGetPhysicalDeviceMemoryProperties2 ) vkGetPhysicalDeviceMemoryProperties2 = vkGetPhysicalDeviceMemoryProperties2KHR;
vkGetPhysicalDeviceMultisamplePropertiesEXT = PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMultisamplePropertiesEXT" ) );
vkGetPhysicalDevicePresentRectanglesKHR = PFN_vkGetPhysicalDevicePresentRectanglesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDevicePresentRectanglesKHR" ) );
vkGetPhysicalDeviceProperties = PFN_vkGetPhysicalDeviceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties" ) );
- vkGetPhysicalDeviceProperties2 = PFN_vkGetPhysicalDeviceProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2" ) );
vkGetPhysicalDeviceProperties2KHR = PFN_vkGetPhysicalDeviceProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2KHR" ) );
+ vkGetPhysicalDeviceProperties2 = PFN_vkGetPhysicalDeviceProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2" ) );
+ if ( !vkGetPhysicalDeviceProperties2 ) vkGetPhysicalDeviceProperties2 = vkGetPhysicalDeviceProperties2KHR;
vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR" ) );
vkGetPhysicalDeviceQueueFamilyProperties = PFN_vkGetPhysicalDeviceQueueFamilyProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties" ) );
- vkGetPhysicalDeviceQueueFamilyProperties2 = PFN_vkGetPhysicalDeviceQueueFamilyProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2" ) );
vkGetPhysicalDeviceQueueFamilyProperties2KHR = PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2KHR" ) );
+ vkGetPhysicalDeviceQueueFamilyProperties2 = PFN_vkGetPhysicalDeviceQueueFamilyProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2" ) );
+ if ( !vkGetPhysicalDeviceQueueFamilyProperties2 ) vkGetPhysicalDeviceQueueFamilyProperties2 = vkGetPhysicalDeviceQueueFamilyProperties2KHR;
vkGetPhysicalDeviceSparseImageFormatProperties = PFN_vkGetPhysicalDeviceSparseImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties" ) );
- vkGetPhysicalDeviceSparseImageFormatProperties2 = PFN_vkGetPhysicalDeviceSparseImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2" ) );
vkGetPhysicalDeviceSparseImageFormatProperties2KHR = PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2KHR" ) );
+ vkGetPhysicalDeviceSparseImageFormatProperties2 = PFN_vkGetPhysicalDeviceSparseImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2" ) );
+ if ( !vkGetPhysicalDeviceSparseImageFormatProperties2 ) vkGetPhysicalDeviceSparseImageFormatProperties2 = vkGetPhysicalDeviceSparseImageFormatProperties2KHR;
vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV" ) );
vkGetPhysicalDeviceSurfaceCapabilities2EXT = PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2EXT" ) );
vkGetPhysicalDeviceSurfaceCapabilities2KHR = PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2KHR" ) );
@@ -70726,50 +90921,105 @@ namespace VULKAN_HPP_NAMESPACE
vkGetRandROutputDisplayEXT = PFN_vkGetRandROutputDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetRandROutputDisplayEXT" ) );
#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
vkReleaseDisplayEXT = PFN_vkReleaseDisplayEXT( vkGetInstanceProcAddr( instance, "vkReleaseDisplayEXT" ) );
+ vkSubmitDebugUtilsMessageEXT = PFN_vkSubmitDebugUtilsMessageEXT( vkGetInstanceProcAddr( instance, "vkSubmitDebugUtilsMessageEXT" ) );
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetInstanceProcAddr( instance, "vkAcquireFullScreenExclusiveModeEXT" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImage2KHR" ) );
+ vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImageKHR" ) );
+ vkAcquirePerformanceConfigurationINTEL = PFN_vkAcquirePerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkAcquirePerformanceConfigurationINTEL" ) );
+ vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkAcquireProfilingLockKHR" ) );
+ vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetInstanceProcAddr( instance, "vkAllocateCommandBuffers" ) );
+ vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetInstanceProcAddr( instance, "vkAllocateDescriptorSets" ) );
+ vkAllocateMemory = PFN_vkAllocateMemory( vkGetInstanceProcAddr( instance, "vkAllocateMemory" ) );
vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetInstanceProcAddr( instance, "vkBeginCommandBuffer" ) );
+ vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( vkGetInstanceProcAddr( instance, "vkBindAccelerationStructureMemoryNV" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkBindAccelerationStructureMemoryKHR = PFN_vkBindAccelerationStructureMemoryKHR( vkGetInstanceProcAddr( instance, "vkBindAccelerationStructureMemoryKHR" ) );
+ if ( !vkBindAccelerationStructureMemoryKHR ) vkBindAccelerationStructureMemoryKHR = vkBindAccelerationStructureMemoryNV;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetInstanceProcAddr( instance, "vkBindBufferMemory" ) );
+ vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2KHR" ) );
+ vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2" ) );
+ if ( !vkBindBufferMemory2 ) vkBindBufferMemory2 = vkBindBufferMemory2KHR;
+ vkBindImageMemory = PFN_vkBindImageMemory( vkGetInstanceProcAddr( instance, "vkBindImageMemory" ) );
+ vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindImageMemory2KHR" ) );
+ vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetInstanceProcAddr( instance, "vkBindImageMemory2" ) );
+ if ( !vkBindImageMemory2 ) vkBindImageMemory2 = vkBindImageMemory2KHR;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkBuildAccelerationStructureKHR = PFN_vkBuildAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkBuildAccelerationStructureKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginConditionalRenderingEXT" ) );
vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginDebugUtilsLabelEXT" ) );
vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetInstanceProcAddr( instance, "vkCmdBeginQuery" ) );
vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginQueryIndexedEXT" ) );
vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass" ) );
- vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2" ) );
vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2KHR" ) );
+ vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2" ) );
+ if ( !vkCmdBeginRenderPass2 ) vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR;
vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginTransformFeedbackEXT" ) );
vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorSets" ) );
vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetInstanceProcAddr( instance, "vkCmdBindIndexBuffer" ) );
vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetInstanceProcAddr( instance, "vkCmdBindPipeline" ) );
+ vkCmdBindPipelineShaderGroupNV = PFN_vkCmdBindPipelineShaderGroupNV( vkGetInstanceProcAddr( instance, "vkCmdBindPipelineShaderGroupNV" ) );
vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetInstanceProcAddr( instance, "vkCmdBindShadingRateImageNV" ) );
vkCmdBindTransformFeedbackBuffersEXT = PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindTransformFeedbackBuffersEXT" ) );
vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers" ) );
+ vkCmdBindVertexBuffers2EXT = PFN_vkCmdBindVertexBuffers2EXT( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers2EXT" ) );
vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetInstanceProcAddr( instance, "vkCmdBlitImage" ) );
+ vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdBlitImage2KHR" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkCmdBuildAccelerationStructureIndirectKHR = PFN_vkCmdBuildAccelerationStructureIndirectKHR( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructureIndirectKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkCmdBuildAccelerationStructureKHR = PFN_vkCmdBuildAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructureKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructureNV" ) );
vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetInstanceProcAddr( instance, "vkCmdClearAttachments" ) );
vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetInstanceProcAddr( instance, "vkCmdClearColorImage" ) );
vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetInstanceProcAddr( instance, "vkCmdClearDepthStencilImage" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkCmdCopyAccelerationStructureKHR = PFN_vkCmdCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureNV" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkCmdCopyAccelerationStructureToMemoryKHR = PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureToMemoryKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer" ) );
+ vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer2KHR" ) );
vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage" ) );
+ vkCmdCopyBufferToImage2KHR = PFN_vkCmdCopyBufferToImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage2KHR" ) );
vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetInstanceProcAddr( instance, "vkCmdCopyImage" ) );
+ vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyImage2KHR" ) );
vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer" ) );
+ vkCmdCopyImageToBuffer2KHR = PFN_vkCmdCopyImageToBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer2KHR" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkCmdCopyMemoryToAccelerationStructureKHR = PFN_vkCmdCopyMemoryToAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryToAccelerationStructureKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetInstanceProcAddr( instance, "vkCmdCopyQueryPoolResults" ) );
vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerBeginEXT" ) );
vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerEndEXT" ) );
vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerInsertEXT" ) );
vkCmdDispatch = PFN_vkCmdDispatch( vkGetInstanceProcAddr( instance, "vkCmdDispatch" ) );
- vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetInstanceProcAddr( instance, "vkCmdDispatchBase" ) );
vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetInstanceProcAddr( instance, "vkCmdDispatchBaseKHR" ) );
+ vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetInstanceProcAddr( instance, "vkCmdDispatchBase" ) );
+ if ( !vkCmdDispatchBase ) vkCmdDispatchBase = vkCmdDispatchBaseKHR;
vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetInstanceProcAddr( instance, "vkCmdDispatchIndirect" ) );
vkCmdDraw = PFN_vkCmdDraw( vkGetInstanceProcAddr( instance, "vkCmdDraw" ) );
vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexed" ) );
vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirect" ) );
- vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCount" ) );
vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountAMD" ) );
vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountKHR" ) );
+ vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCount" ) );
+ if ( !vkCmdDrawIndexedIndirectCount ) vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR;
+ if ( !vkCmdDrawIndexedIndirectCount ) vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD;
vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirect" ) );
vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectByteCountEXT" ) );
- vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCount" ) );
vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountAMD" ) );
vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountKHR" ) );
+ vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCount" ) );
+ if ( !vkCmdDrawIndirectCount ) vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR;
+ if ( !vkCmdDrawIndirectCount ) vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD;
vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectCountNV" ) );
vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectNV" ) );
vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksNV" ) );
@@ -70778,100 +91028,127 @@ namespace VULKAN_HPP_NAMESPACE
vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetInstanceProcAddr( instance, "vkCmdEndQuery" ) );
vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdEndQueryIndexedEXT" ) );
vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass" ) );
- vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2" ) );
vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2KHR" ) );
+ vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2" ) );
+ if ( !vkCmdEndRenderPass2 ) vkCmdEndRenderPass2 = vkCmdEndRenderPass2KHR;
vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdEndTransformFeedbackEXT" ) );
vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetInstanceProcAddr( instance, "vkCmdExecuteCommands" ) );
+ vkCmdExecuteGeneratedCommandsNV = PFN_vkCmdExecuteGeneratedCommandsNV( vkGetInstanceProcAddr( instance, "vkCmdExecuteGeneratedCommandsNV" ) );
vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetInstanceProcAddr( instance, "vkCmdFillBuffer" ) );
vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdInsertDebugUtilsLabelEXT" ) );
vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass" ) );
- vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2" ) );
vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2KHR" ) );
+ vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2" ) );
+ if ( !vkCmdNextSubpass2 ) vkCmdNextSubpass2 = vkCmdNextSubpass2KHR;
vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier" ) );
- vkCmdProcessCommandsNVX = PFN_vkCmdProcessCommandsNVX( vkGetInstanceProcAddr( instance, "vkCmdProcessCommandsNVX" ) );
+ vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetInstanceProcAddr( instance, "vkCmdPreprocessGeneratedCommandsNV" ) );
vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetInstanceProcAddr( instance, "vkCmdPushConstants" ) );
vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetKHR" ) );
vkCmdPushDescriptorSetWithTemplateKHR = PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetWithTemplateKHR" ) );
- vkCmdReserveSpaceForCommandsNVX = PFN_vkCmdReserveSpaceForCommandsNVX( vkGetInstanceProcAddr( instance, "vkCmdReserveSpaceForCommandsNVX" ) );
vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetInstanceProcAddr( instance, "vkCmdResetEvent" ) );
vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetInstanceProcAddr( instance, "vkCmdResetQueryPool" ) );
vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetInstanceProcAddr( instance, "vkCmdResolveImage" ) );
+ vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdResolveImage2KHR" ) );
vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetInstanceProcAddr( instance, "vkCmdSetBlendConstants" ) );
vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetInstanceProcAddr( instance, "vkCmdSetCheckpointNV" ) );
vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoarseSampleOrderNV" ) );
+ vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetCullModeEXT" ) );
vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBias" ) );
vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBounds" ) );
- vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMask" ) );
+ vkCmdSetDepthBoundsTestEnableEXT = PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBoundsTestEnableEXT" ) );
+ vkCmdSetDepthCompareOpEXT = PFN_vkCmdSetDepthCompareOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthCompareOpEXT" ) );
+ vkCmdSetDepthTestEnableEXT = PFN_vkCmdSetDepthTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthTestEnableEXT" ) );
+ vkCmdSetDepthWriteEnableEXT = PFN_vkCmdSetDepthWriteEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthWriteEnableEXT" ) );
vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMaskKHR" ) );
+ vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMask" ) );
+ if ( !vkCmdSetDeviceMask ) vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR;
vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDiscardRectangleEXT" ) );
vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetInstanceProcAddr( instance, "vkCmdSetEvent" ) );
vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetInstanceProcAddr( instance, "vkCmdSetExclusiveScissorNV" ) );
+ vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetInstanceProcAddr( instance, "vkCmdSetFrontFaceEXT" ) );
vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLineStippleEXT" ) );
vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetInstanceProcAddr( instance, "vkCmdSetLineWidth" ) );
vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceMarkerINTEL" ) );
vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceOverrideINTEL" ) );
vkCmdSetPerformanceStreamMarkerINTEL = PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceStreamMarkerINTEL" ) );
+ vkCmdSetPrimitiveTopologyEXT = PFN_vkCmdSetPrimitiveTopologyEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveTopologyEXT" ) );
vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetSampleLocationsEXT" ) );
vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetInstanceProcAddr( instance, "vkCmdSetScissor" ) );
+ vkCmdSetScissorWithCountEXT = PFN_vkCmdSetScissorWithCountEXT( vkGetInstanceProcAddr( instance, "vkCmdSetScissorWithCountEXT" ) );
vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilCompareMask" ) );
+ vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetStencilOpEXT" ) );
vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetInstanceProcAddr( instance, "vkCmdSetStencilReference" ) );
+ vkCmdSetStencilTestEnableEXT = PFN_vkCmdSetStencilTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetStencilTestEnableEXT" ) );
vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilWriteMask" ) );
vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetInstanceProcAddr( instance, "vkCmdSetViewport" ) );
vkCmdSetViewportShadingRatePaletteNV = PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportShadingRatePaletteNV" ) );
vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWScalingNV" ) );
+ vkCmdSetViewportWithCountEXT = PFN_vkCmdSetViewportWithCountEXT( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWithCountEXT" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysIndirectKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysNV" ) );
vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetInstanceProcAddr( instance, "vkCmdUpdateBuffer" ) );
vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents" ) );
vkCmdWriteAccelerationStructuresPropertiesNV = PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesNV" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkCmdWriteAccelerationStructuresPropertiesKHR = PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) );
+ if ( !vkCmdWriteAccelerationStructuresPropertiesKHR ) vkCmdWriteAccelerationStructuresPropertiesKHR = vkCmdWriteAccelerationStructuresPropertiesNV;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarkerAMD" ) );
vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp" ) );
- vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetInstanceProcAddr( instance, "vkEndCommandBuffer" ) );
- vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetInstanceProcAddr( instance, "vkResetCommandBuffer" ) );
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetInstanceProcAddr( instance, "vkAcquireFullScreenExclusiveModeEXT" ) );
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImage2KHR" ) );
- vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImageKHR" ) );
- vkAcquirePerformanceConfigurationINTEL = PFN_vkAcquirePerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkAcquirePerformanceConfigurationINTEL" ) );
- vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkAcquireProfilingLockKHR" ) );
- vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetInstanceProcAddr( instance, "vkAllocateCommandBuffers" ) );
- vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetInstanceProcAddr( instance, "vkAllocateDescriptorSets" ) );
- vkAllocateMemory = PFN_vkAllocateMemory( vkGetInstanceProcAddr( instance, "vkAllocateMemory" ) );
- vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( vkGetInstanceProcAddr( instance, "vkBindAccelerationStructureMemoryNV" ) );
- vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetInstanceProcAddr( instance, "vkBindBufferMemory" ) );
- vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2" ) );
- vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2KHR" ) );
- vkBindImageMemory = PFN_vkBindImageMemory( vkGetInstanceProcAddr( instance, "vkBindImageMemory" ) );
- vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetInstanceProcAddr( instance, "vkBindImageMemory2" ) );
- vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindImageMemory2KHR" ) );
vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetInstanceProcAddr( instance, "vkCompileDeferredNV" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkCopyAccelerationStructureToMemoryKHR = PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureToMemoryKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkCopyMemoryToAccelerationStructureKHR = PFN_vkCopyMemoryToAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCopyMemoryToAccelerationStructureKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkCreateAccelerationStructureKHR = PFN_vkCreateAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureNV" ) );
vkCreateBuffer = PFN_vkCreateBuffer( vkGetInstanceProcAddr( instance, "vkCreateBuffer" ) );
vkCreateBufferView = PFN_vkCreateBufferView( vkGetInstanceProcAddr( instance, "vkCreateBufferView" ) );
vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetInstanceProcAddr( instance, "vkCreateCommandPool" ) );
vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetInstanceProcAddr( instance, "vkCreateComputePipelines" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkCreateDeferredOperationKHR = PFN_vkCreateDeferredOperationKHR( vkGetInstanceProcAddr( instance, "vkCreateDeferredOperationKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetInstanceProcAddr( instance, "vkCreateDescriptorPool" ) );
vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkCreateDescriptorSetLayout" ) );
- vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplate" ) );
vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplateKHR" ) );
+ vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplate" ) );
+ if ( !vkCreateDescriptorUpdateTemplate ) vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR;
vkCreateEvent = PFN_vkCreateEvent( vkGetInstanceProcAddr( instance, "vkCreateEvent" ) );
vkCreateFence = PFN_vkCreateFence( vkGetInstanceProcAddr( instance, "vkCreateFence" ) );
vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetInstanceProcAddr( instance, "vkCreateFramebuffer" ) );
vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetInstanceProcAddr( instance, "vkCreateGraphicsPipelines" ) );
vkCreateImage = PFN_vkCreateImage( vkGetInstanceProcAddr( instance, "vkCreateImage" ) );
vkCreateImageView = PFN_vkCreateImageView( vkGetInstanceProcAddr( instance, "vkCreateImageView" ) );
- vkCreateIndirectCommandsLayoutNVX = PFN_vkCreateIndirectCommandsLayoutNVX( vkGetInstanceProcAddr( instance, "vkCreateIndirectCommandsLayoutNVX" ) );
- vkCreateObjectTableNVX = PFN_vkCreateObjectTableNVX( vkGetInstanceProcAddr( instance, "vkCreateObjectTableNVX" ) );
+ vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetInstanceProcAddr( instance, "vkCreateIndirectCommandsLayoutNV" ) );
vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetInstanceProcAddr( instance, "vkCreatePipelineCache" ) );
vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetInstanceProcAddr( instance, "vkCreatePipelineLayout" ) );
+ vkCreatePrivateDataSlotEXT = PFN_vkCreatePrivateDataSlotEXT( vkGetInstanceProcAddr( instance, "vkCreatePrivateDataSlotEXT" ) );
vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetInstanceProcAddr( instance, "vkCreateQueryPool" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkCreateRayTracingPipelinesKHR = PFN_vkCreateRayTracingPipelinesKHR( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesNV" ) );
vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetInstanceProcAddr( instance, "vkCreateRenderPass" ) );
- vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2" ) );
vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2KHR" ) );
+ vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2" ) );
+ if ( !vkCreateRenderPass2 ) vkCreateRenderPass2 = vkCreateRenderPass2KHR;
vkCreateSampler = PFN_vkCreateSampler( vkGetInstanceProcAddr( instance, "vkCreateSampler" ) );
- vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversion" ) );
vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversionKHR" ) );
+ vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversion" ) );
+ if ( !vkCreateSamplerYcbcrConversion ) vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR;
vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetInstanceProcAddr( instance, "vkCreateSemaphore" ) );
vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetInstanceProcAddr( instance, "vkCreateShaderModule" ) );
vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetInstanceProcAddr( instance, "vkCreateSharedSwapchainsKHR" ) );
@@ -70879,66 +91156,101 @@ namespace VULKAN_HPP_NAMESPACE
vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkCreateValidationCacheEXT" ) );
vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectNameEXT" ) );
vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectTagEXT" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetInstanceProcAddr( instance, "vkDeferredOperationJoinKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureNV" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkDestroyAccelerationStructureKHR = PFN_vkDestroyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureKHR" ) );
+ if ( !vkDestroyAccelerationStructureKHR ) vkDestroyAccelerationStructureKHR = vkDestroyAccelerationStructureNV;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetInstanceProcAddr( instance, "vkDestroyBuffer" ) );
vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetInstanceProcAddr( instance, "vkDestroyBufferView" ) );
vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetInstanceProcAddr( instance, "vkDestroyCommandPool" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkDestroyDeferredOperationKHR = PFN_vkDestroyDeferredOperationKHR( vkGetInstanceProcAddr( instance, "vkDestroyDeferredOperationKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorPool" ) );
vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorSetLayout" ) );
- vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplate" ) );
vkDestroyDescriptorUpdateTemplateKHR = PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplateKHR" ) );
+ vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplate" ) );
+ if ( !vkDestroyDescriptorUpdateTemplate ) vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR;
vkDestroyDevice = PFN_vkDestroyDevice( vkGetInstanceProcAddr( instance, "vkDestroyDevice" ) );
vkDestroyEvent = PFN_vkDestroyEvent( vkGetInstanceProcAddr( instance, "vkDestroyEvent" ) );
vkDestroyFence = PFN_vkDestroyFence( vkGetInstanceProcAddr( instance, "vkDestroyFence" ) );
vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetInstanceProcAddr( instance, "vkDestroyFramebuffer" ) );
vkDestroyImage = PFN_vkDestroyImage( vkGetInstanceProcAddr( instance, "vkDestroyImage" ) );
vkDestroyImageView = PFN_vkDestroyImageView( vkGetInstanceProcAddr( instance, "vkDestroyImageView" ) );
- vkDestroyIndirectCommandsLayoutNVX = PFN_vkDestroyIndirectCommandsLayoutNVX( vkGetInstanceProcAddr( instance, "vkDestroyIndirectCommandsLayoutNVX" ) );
- vkDestroyObjectTableNVX = PFN_vkDestroyObjectTableNVX( vkGetInstanceProcAddr( instance, "vkDestroyObjectTableNVX" ) );
+ vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetInstanceProcAddr( instance, "vkDestroyIndirectCommandsLayoutNV" ) );
vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetInstanceProcAddr( instance, "vkDestroyPipeline" ) );
vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetInstanceProcAddr( instance, "vkDestroyPipelineCache" ) );
vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetInstanceProcAddr( instance, "vkDestroyPipelineLayout" ) );
+ vkDestroyPrivateDataSlotEXT = PFN_vkDestroyPrivateDataSlotEXT( vkGetInstanceProcAddr( instance, "vkDestroyPrivateDataSlotEXT" ) );
vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetInstanceProcAddr( instance, "vkDestroyQueryPool" ) );
vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetInstanceProcAddr( instance, "vkDestroyRenderPass" ) );
vkDestroySampler = PFN_vkDestroySampler( vkGetInstanceProcAddr( instance, "vkDestroySampler" ) );
- vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversion" ) );
vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversionKHR" ) );
+ vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversion" ) );
+ if ( !vkDestroySamplerYcbcrConversion ) vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR;
vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetInstanceProcAddr( instance, "vkDestroySemaphore" ) );
vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetInstanceProcAddr( instance, "vkDestroyShaderModule" ) );
vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetInstanceProcAddr( instance, "vkDestroySwapchainKHR" ) );
vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkDestroyValidationCacheEXT" ) );
vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetInstanceProcAddr( instance, "vkDeviceWaitIdle" ) );
vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetInstanceProcAddr( instance, "vkDisplayPowerControlEXT" ) );
+ vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetInstanceProcAddr( instance, "vkEndCommandBuffer" ) );
vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( vkGetInstanceProcAddr( instance, "vkFlushMappedMemoryRanges" ) );
vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetInstanceProcAddr( instance, "vkFreeCommandBuffers" ) );
vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetInstanceProcAddr( instance, "vkFreeDescriptorSets" ) );
vkFreeMemory = PFN_vkFreeMemory( vkGetInstanceProcAddr( instance, "vkFreeMemory" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkGetAccelerationStructureDeviceAddressKHR = PFN_vkGetAccelerationStructureDeviceAddressKHR( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureDeviceAddressKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureHandleNV" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkGetAccelerationStructureMemoryRequirementsKHR = PFN_vkGetAccelerationStructureMemoryRequirementsKHR( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureMemoryRequirementsKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
vkGetAccelerationStructureMemoryRequirementsNV = PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureMemoryRequirementsNV" ) );
#ifdef VK_USE_PLATFORM_ANDROID_KHR
vkGetAndroidHardwareBufferPropertiesANDROID = PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetInstanceProcAddr( instance, "vkGetAndroidHardwareBufferPropertiesANDROID" ) );
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
- vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddress" ) );
vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressEXT" ) );
vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressKHR" ) );
+ vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddress" ) );
+ if ( !vkGetBufferDeviceAddress ) vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR;
+ if ( !vkGetBufferDeviceAddress ) vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT;
vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements" ) );
- vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2" ) );
vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2KHR" ) );
- vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddress" ) );
+ vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2" ) );
+ if ( !vkGetBufferMemoryRequirements2 ) vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR;
vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddressKHR" ) );
+ vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddress" ) );
+ if ( !vkGetBufferOpaqueCaptureAddress ) vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR;
vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetInstanceProcAddr( instance, "vkGetCalibratedTimestampsEXT" ) );
- vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupport" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkGetDeferredOperationMaxConcurrencyKHR = PFN_vkGetDeferredOperationMaxConcurrencyKHR( vkGetInstanceProcAddr( instance, "vkGetDeferredOperationMaxConcurrencyKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkGetDeferredOperationResultKHR = PFN_vkGetDeferredOperationResultKHR( vkGetInstanceProcAddr( instance, "vkGetDeferredOperationResultKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupportKHR" ) );
- vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeatures" ) );
+ vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupport" ) );
+ if ( !vkGetDescriptorSetLayoutSupport ) vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkGetDeviceAccelerationStructureCompatibilityKHR = PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
vkGetDeviceGroupPeerMemoryFeaturesKHR = PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) );
+ vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeatures" ) );
+ if ( !vkGetDeviceGroupPeerMemoryFeatures ) vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR;
vkGetDeviceGroupPresentCapabilitiesKHR = PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPresentCapabilitiesKHR" ) );
#ifdef VK_USE_PLATFORM_WIN32_KHR
vkGetDeviceGroupSurfacePresentModes2EXT = PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModes2EXT" ) );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
vkGetDeviceGroupSurfacePresentModesKHR = PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModesKHR" ) );
vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryCommitment" ) );
- vkGetDeviceMemoryOpaqueCaptureAddress = PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddress" ) );
vkGetDeviceMemoryOpaqueCaptureAddressKHR = PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) );
+ vkGetDeviceMemoryOpaqueCaptureAddress = PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddress" ) );
+ if ( !vkGetDeviceMemoryOpaqueCaptureAddress ) vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR;
vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetInstanceProcAddr( instance, "vkGetDeviceProcAddr" ) );
vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue" ) );
vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue2" ) );
@@ -70948,14 +91260,18 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_WIN32_KHR
vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetFenceWin32HandleKHR" ) );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ vkGetGeneratedCommandsMemoryRequirementsNV = PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetInstanceProcAddr( instance, "vkGetGeneratedCommandsMemoryRequirementsNV" ) );
vkGetImageDrmFormatModifierPropertiesEXT = PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetImageDrmFormatModifierPropertiesEXT" ) );
vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements" ) );
- vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2" ) );
vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2KHR" ) );
+ vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2" ) );
+ if ( !vkGetImageMemoryRequirements2 ) vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR;
vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements" ) );
- vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2" ) );
vkGetImageSparseMemoryRequirements2KHR = PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2KHR" ) );
+ vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2" ) );
+ if ( !vkGetImageSparseMemoryRequirements2 ) vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR;
vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout" ) );
+ vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewAddressNVX" ) );
vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewHandleNVX" ) );
#ifdef VK_USE_PLATFORM_ANDROID_KHR
vkGetMemoryAndroidHardwareBufferANDROID = PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetInstanceProcAddr( instance, "vkGetMemoryAndroidHardwareBufferANDROID" ) );
@@ -70978,12 +91294,22 @@ namespace VULKAN_HPP_NAMESPACE
vkGetPipelineExecutableInternalRepresentationsKHR = PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableInternalRepresentationsKHR" ) );
vkGetPipelineExecutablePropertiesKHR = PFN_vkGetPipelineExecutablePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutablePropertiesKHR" ) );
vkGetPipelineExecutableStatisticsKHR = PFN_vkGetPipelineExecutableStatisticsKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableStatisticsKHR" ) );
+ vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetInstanceProcAddr( instance, "vkGetPrivateDataEXT" ) );
vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetInstanceProcAddr( instance, "vkGetQueryPoolResults" ) );
+ vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointDataNV" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesNV" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkGetRayTracingShaderGroupHandlesKHR = PFN_vkGetRayTracingShaderGroupHandlesKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesKHR" ) );
+ if ( !vkGetRayTracingShaderGroupHandlesKHR ) vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetInstanceProcAddr( instance, "vkGetRefreshCycleDurationGOOGLE" ) );
vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetInstanceProcAddr( instance, "vkGetRenderAreaGranularity" ) );
- vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValue" ) );
vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValueKHR" ) );
+ vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValue" ) );
+ if ( !vkGetSemaphoreCounterValue ) vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR;
vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreFdKHR" ) );
#ifdef VK_USE_PLATFORM_WIN32_KHR
vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreWin32HandleKHR" ) );
@@ -71006,96 +91332,157 @@ namespace VULKAN_HPP_NAMESPACE
vkMapMemory = PFN_vkMapMemory( vkGetInstanceProcAddr( instance, "vkMapMemory" ) );
vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetInstanceProcAddr( instance, "vkMergePipelineCaches" ) );
vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetInstanceProcAddr( instance, "vkMergeValidationCachesEXT" ) );
+ vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueBeginDebugUtilsLabelEXT" ) );
+ vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetInstanceProcAddr( instance, "vkQueueBindSparse" ) );
+ vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueEndDebugUtilsLabelEXT" ) );
+ vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueInsertDebugUtilsLabelEXT" ) );
+ vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetInstanceProcAddr( instance, "vkQueuePresentKHR" ) );
+ vkQueueSetPerformanceConfigurationINTEL = PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkQueueSetPerformanceConfigurationINTEL" ) );
+ vkQueueSubmit = PFN_vkQueueSubmit( vkGetInstanceProcAddr( instance, "vkQueueSubmit" ) );
+ vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetInstanceProcAddr( instance, "vkQueueWaitIdle" ) );
vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDeviceEventEXT" ) );
vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDisplayEventEXT" ) );
- vkRegisterObjectsNVX = PFN_vkRegisterObjectsNVX( vkGetInstanceProcAddr( instance, "vkRegisterObjectsNVX" ) );
#ifdef VK_USE_PLATFORM_WIN32_KHR
vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetInstanceProcAddr( instance, "vkReleaseFullScreenExclusiveModeEXT" ) );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
vkReleasePerformanceConfigurationINTEL = PFN_vkReleasePerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkReleasePerformanceConfigurationINTEL" ) );
vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkReleaseProfilingLockKHR" ) );
+ vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetInstanceProcAddr( instance, "vkResetCommandBuffer" ) );
vkResetCommandPool = PFN_vkResetCommandPool( vkGetInstanceProcAddr( instance, "vkResetCommandPool" ) );
vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetInstanceProcAddr( instance, "vkResetDescriptorPool" ) );
vkResetEvent = PFN_vkResetEvent( vkGetInstanceProcAddr( instance, "vkResetEvent" ) );
vkResetFences = PFN_vkResetFences( vkGetInstanceProcAddr( instance, "vkResetFences" ) );
- vkResetQueryPool = PFN_vkResetQueryPool( vkGetInstanceProcAddr( instance, "vkResetQueryPool" ) );
vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetInstanceProcAddr( instance, "vkResetQueryPoolEXT" ) );
+ vkResetQueryPool = PFN_vkResetQueryPool( vkGetInstanceProcAddr( instance, "vkResetQueryPool" ) );
+ if ( !vkResetQueryPool ) vkResetQueryPool = vkResetQueryPoolEXT;
vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectNameEXT" ) );
vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectTagEXT" ) );
vkSetEvent = PFN_vkSetEvent( vkGetInstanceProcAddr( instance, "vkSetEvent" ) );
vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetInstanceProcAddr( instance, "vkSetHdrMetadataEXT" ) );
vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetInstanceProcAddr( instance, "vkSetLocalDimmingAMD" ) );
- vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetInstanceProcAddr( instance, "vkSignalSemaphore" ) );
+ vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetInstanceProcAddr( instance, "vkSetPrivateDataEXT" ) );
vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetInstanceProcAddr( instance, "vkSignalSemaphoreKHR" ) );
- vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetInstanceProcAddr( instance, "vkTrimCommandPool" ) );
+ vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetInstanceProcAddr( instance, "vkSignalSemaphore" ) );
+ if ( !vkSignalSemaphore ) vkSignalSemaphore = vkSignalSemaphoreKHR;
vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetInstanceProcAddr( instance, "vkTrimCommandPoolKHR" ) );
+ vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetInstanceProcAddr( instance, "vkTrimCommandPool" ) );
+ if ( !vkTrimCommandPool ) vkTrimCommandPool = vkTrimCommandPoolKHR;
vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkUninitializePerformanceApiINTEL" ) );
vkUnmapMemory = PFN_vkUnmapMemory( vkGetInstanceProcAddr( instance, "vkUnmapMemory" ) );
- vkUnregisterObjectsNVX = PFN_vkUnregisterObjectsNVX( vkGetInstanceProcAddr( instance, "vkUnregisterObjectsNVX" ) );
- vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplate" ) );
vkUpdateDescriptorSetWithTemplateKHR = PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplateKHR" ) );
+ vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplate" ) );
+ if ( !vkUpdateDescriptorSetWithTemplate ) vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR;
vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSets" ) );
vkWaitForFences = PFN_vkWaitForFences( vkGetInstanceProcAddr( instance, "vkWaitForFences" ) );
- vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetInstanceProcAddr( instance, "vkWaitSemaphores" ) );
vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetInstanceProcAddr( instance, "vkWaitSemaphoresKHR" ) );
- vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointDataNV" ) );
- vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueBeginDebugUtilsLabelEXT" ) );
- vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetInstanceProcAddr( instance, "vkQueueBindSparse" ) );
- vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueEndDebugUtilsLabelEXT" ) );
- vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueInsertDebugUtilsLabelEXT" ) );
- vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetInstanceProcAddr( instance, "vkQueuePresentKHR" ) );
- vkQueueSetPerformanceConfigurationINTEL = PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkQueueSetPerformanceConfigurationINTEL" ) );
- vkQueueSubmit = PFN_vkQueueSubmit( vkGetInstanceProcAddr( instance, "vkQueueSubmit" ) );
- vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetInstanceProcAddr( instance, "vkQueueWaitIdle" ) );
+ vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetInstanceProcAddr( instance, "vkWaitSemaphores" ) );
+ if ( !vkWaitSemaphores ) vkWaitSemaphores = vkWaitSemaphoresKHR;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkWriteAccelerationStructuresPropertiesKHR = PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetInstanceProcAddr( instance, "vkWriteAccelerationStructuresPropertiesKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
}
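The second init overload below repeats the same resolution for a created VkDevice, but through vkGetDeviceProcAddr, which returns driver entry points directly and so avoids the loader's per-call dispatch. A minimal sketch of one such entry, with the same core-then-alias fallback; the free function shown is our own illustration, not generated code, and assumes vkGetDeviceProcAddr has already been resolved.

    #include <vulkan/vulkan.h>

    // Illustrative only: re-resolve one table entry against a specific device.
    void initDeviceEntry( VkDevice device,
                          PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr,
                          PFN_vkBindBufferMemory2 & vkBindBufferMemory2 )
    {
      // Extension alias first, then the core symbol, then fall back.
      auto khr = reinterpret_cast<PFN_vkBindBufferMemory2KHR>(
        vkGetDeviceProcAddr( device, "vkBindBufferMemory2KHR" ) );
      vkBindBufferMemory2 = reinterpret_cast<PFN_vkBindBufferMemory2>(
        vkGetDeviceProcAddr( device, "vkBindBufferMemory2" ) );
      if ( !vkBindBufferMemory2 )
        vkBindBufferMemory2 = khr;  // implementation exposes only the KHR name
    }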
void init( VULKAN_HPP_NAMESPACE::Device deviceCpp ) VULKAN_HPP_NOEXCEPT
{
VkDevice device = static_cast<VkDevice>(deviceCpp);
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkAcquireFullScreenExclusiveModeEXT" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetDeviceProcAddr( device, "vkAcquireNextImage2KHR" ) );
+ vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetDeviceProcAddr( device, "vkAcquireNextImageKHR" ) );
+ vkAcquirePerformanceConfigurationINTEL = PFN_vkAcquirePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkAcquirePerformanceConfigurationINTEL" ) );
+ vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetDeviceProcAddr( device, "vkAcquireProfilingLockKHR" ) );
+ vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetDeviceProcAddr( device, "vkAllocateCommandBuffers" ) );
+ vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetDeviceProcAddr( device, "vkAllocateDescriptorSets" ) );
+ vkAllocateMemory = PFN_vkAllocateMemory( vkGetDeviceProcAddr( device, "vkAllocateMemory" ) );
vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetDeviceProcAddr( device, "vkBeginCommandBuffer" ) );
+ vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( vkGetDeviceProcAddr( device, "vkBindAccelerationStructureMemoryNV" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkBindAccelerationStructureMemoryKHR = PFN_vkBindAccelerationStructureMemoryKHR( vkGetDeviceProcAddr( device, "vkBindAccelerationStructureMemoryKHR" ) );
+ if ( !vkBindAccelerationStructureMemoryKHR ) vkBindAccelerationStructureMemoryKHR = vkBindAccelerationStructureMemoryNV;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetDeviceProcAddr( device, "vkBindBufferMemory" ) );
+ vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetDeviceProcAddr( device, "vkBindBufferMemory2KHR" ) );
+ vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetDeviceProcAddr( device, "vkBindBufferMemory2" ) );
+ if ( !vkBindBufferMemory2 ) vkBindBufferMemory2 = vkBindBufferMemory2KHR;
+ vkBindImageMemory = PFN_vkBindImageMemory( vkGetDeviceProcAddr( device, "vkBindImageMemory" ) );
+ vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetDeviceProcAddr( device, "vkBindImageMemory2KHR" ) );
+ vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetDeviceProcAddr( device, "vkBindImageMemory2" ) );
+ if ( !vkBindImageMemory2 ) vkBindImageMemory2 = vkBindImageMemory2KHR;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkBuildAccelerationStructureKHR = PFN_vkBuildAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkBuildAccelerationStructureKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdBeginConditionalRenderingEXT" ) );
vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdBeginDebugUtilsLabelEXT" ) );
vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetDeviceProcAddr( device, "vkCmdBeginQuery" ) );
vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdBeginQueryIndexedEXT" ) );
vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass" ) );
- vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2" ) );
vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2KHR" ) );
+ vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2" ) );
+ if ( !vkCmdBeginRenderPass2 ) vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR;
vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdBeginTransformFeedbackEXT" ) );
vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets" ) );
vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer" ) );
vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetDeviceProcAddr( device, "vkCmdBindPipeline" ) );
+ vkCmdBindPipelineShaderGroupNV = PFN_vkCmdBindPipelineShaderGroupNV( vkGetDeviceProcAddr( device, "vkCmdBindPipelineShaderGroupNV" ) );
vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetDeviceProcAddr( device, "vkCmdBindShadingRateImageNV" ) );
vkCmdBindTransformFeedbackBuffersEXT = PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindTransformFeedbackBuffersEXT" ) );
vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers" ) );
+ vkCmdBindVertexBuffers2EXT = PFN_vkCmdBindVertexBuffers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2EXT" ) );
vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetDeviceProcAddr( device, "vkCmdBlitImage" ) );
+ vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetDeviceProcAddr( device, "vkCmdBlitImage2KHR" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkCmdBuildAccelerationStructureIndirectKHR = PFN_vkCmdBuildAccelerationStructureIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureIndirectKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkCmdBuildAccelerationStructureKHR = PFN_vkCmdBuildAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureNV" ) );
vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetDeviceProcAddr( device, "vkCmdClearAttachments" ) );
vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetDeviceProcAddr( device, "vkCmdClearColorImage" ) );
vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetDeviceProcAddr( device, "vkCmdClearDepthStencilImage" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkCmdCopyAccelerationStructureKHR = PFN_vkCmdCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureNV" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkCmdCopyAccelerationStructureToMemoryKHR = PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureToMemoryKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer" ) );
+ vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2KHR" ) );
vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage" ) );
+ vkCmdCopyBufferToImage2KHR = PFN_vkCmdCopyBufferToImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2KHR" ) );
vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetDeviceProcAddr( device, "vkCmdCopyImage" ) );
+ vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImage2KHR" ) );
vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer" ) );
+ vkCmdCopyImageToBuffer2KHR = PFN_vkCmdCopyImageToBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2KHR" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkCmdCopyMemoryToAccelerationStructureKHR = PFN_vkCmdCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToAccelerationStructureKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetDeviceProcAddr( device, "vkCmdCopyQueryPoolResults" ) );
vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerBeginEXT" ) );
vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerEndEXT" ) );
vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerInsertEXT" ) );
vkCmdDispatch = PFN_vkCmdDispatch( vkGetDeviceProcAddr( device, "vkCmdDispatch" ) );
- vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetDeviceProcAddr( device, "vkCmdDispatchBase" ) );
vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetDeviceProcAddr( device, "vkCmdDispatchBaseKHR" ) );
+ vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetDeviceProcAddr( device, "vkCmdDispatchBase" ) );
+ if ( !vkCmdDispatchBase ) vkCmdDispatchBase = vkCmdDispatchBaseKHR;
vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetDeviceProcAddr( device, "vkCmdDispatchIndirect" ) );
vkCmdDraw = PFN_vkCmdDraw( vkGetDeviceProcAddr( device, "vkCmdDraw" ) );
vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetDeviceProcAddr( device, "vkCmdDrawIndexed" ) );
vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirect" ) );
- vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCount" ) );
vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountAMD" ) );
vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountKHR" ) );
+ vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCount" ) );
+ if ( !vkCmdDrawIndexedIndirectCount ) vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR;
+ if ( !vkCmdDrawIndexedIndirectCount ) vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD;
vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndirect" ) );
vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectByteCountEXT" ) );
- vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCount" ) );
vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountAMD" ) );
vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountKHR" ) );
+ vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCount" ) );
+ if ( !vkCmdDrawIndirectCount ) vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR;
+ if ( !vkCmdDrawIndirectCount ) vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD;
vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountNV" ) );
vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectNV" ) );
vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksNV" ) );
@@ -71104,100 +91491,127 @@ namespace VULKAN_HPP_NAMESPACE
vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetDeviceProcAddr( device, "vkCmdEndQuery" ) );
vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdEndQueryIndexedEXT" ) );
vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass" ) );
- vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2" ) );
vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2KHR" ) );
+ vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2" ) );
+ if ( !vkCmdEndRenderPass2 ) vkCmdEndRenderPass2 = vkCmdEndRenderPass2KHR;
vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdEndTransformFeedbackEXT" ) );
vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetDeviceProcAddr( device, "vkCmdExecuteCommands" ) );
+ vkCmdExecuteGeneratedCommandsNV = PFN_vkCmdExecuteGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdExecuteGeneratedCommandsNV" ) );
vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetDeviceProcAddr( device, "vkCmdFillBuffer" ) );
vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdInsertDebugUtilsLabelEXT" ) );
vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetDeviceProcAddr( device, "vkCmdNextSubpass" ) );
- vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2" ) );
vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2KHR" ) );
+ vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2" ) );
+ if ( !vkCmdNextSubpass2 ) vkCmdNextSubpass2 = vkCmdNextSubpass2KHR;
vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier" ) );
- vkCmdProcessCommandsNVX = PFN_vkCmdProcessCommandsNVX( vkGetDeviceProcAddr( device, "vkCmdProcessCommandsNVX" ) );
+ vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsNV" ) );
vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetDeviceProcAddr( device, "vkCmdPushConstants" ) );
vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetKHR" ) );
vkCmdPushDescriptorSetWithTemplateKHR = PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplateKHR" ) );
- vkCmdReserveSpaceForCommandsNVX = PFN_vkCmdReserveSpaceForCommandsNVX( vkGetDeviceProcAddr( device, "vkCmdReserveSpaceForCommandsNVX" ) );
vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetDeviceProcAddr( device, "vkCmdResetEvent" ) );
vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetDeviceProcAddr( device, "vkCmdResetQueryPool" ) );
vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetDeviceProcAddr( device, "vkCmdResolveImage" ) );
+ vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetDeviceProcAddr( device, "vkCmdResolveImage2KHR" ) );
vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetDeviceProcAddr( device, "vkCmdSetBlendConstants" ) );
vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetDeviceProcAddr( device, "vkCmdSetCheckpointNV" ) );
vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetDeviceProcAddr( device, "vkCmdSetCoarseSampleOrderNV" ) );
+ vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetCullModeEXT" ) );
vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias" ) );
vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetDeviceProcAddr( device, "vkCmdSetDepthBounds" ) );
- vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMask" ) );
+ vkCmdSetDepthBoundsTestEnableEXT = PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnableEXT" ) );
+ vkCmdSetDepthCompareOpEXT = PFN_vkCmdSetDepthCompareOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOpEXT" ) );
+ vkCmdSetDepthTestEnableEXT = PFN_vkCmdSetDepthTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnableEXT" ) );
+ vkCmdSetDepthWriteEnableEXT = PFN_vkCmdSetDepthWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnableEXT" ) );
vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMaskKHR" ) );
+ vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMask" ) );
+ if ( !vkCmdSetDeviceMask ) vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR;
vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEXT" ) );
vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetDeviceProcAddr( device, "vkCmdSetEvent" ) );
vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorNV" ) );
+ vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetDeviceProcAddr( device, "vkCmdSetFrontFaceEXT" ) );
vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEXT" ) );
vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetDeviceProcAddr( device, "vkCmdSetLineWidth" ) );
vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceMarkerINTEL" ) );
vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceOverrideINTEL" ) );
vkCmdSetPerformanceStreamMarkerINTEL = PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceStreamMarkerINTEL" ) );
+ vkCmdSetPrimitiveTopologyEXT = PFN_vkCmdSetPrimitiveTopologyEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopologyEXT" ) );
vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEXT" ) );
vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetDeviceProcAddr( device, "vkCmdSetScissor" ) );
+ vkCmdSetScissorWithCountEXT = PFN_vkCmdSetScissorWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCountEXT" ) );
vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilCompareMask" ) );
+ vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilOpEXT" ) );
vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetDeviceProcAddr( device, "vkCmdSetStencilReference" ) );
+ vkCmdSetStencilTestEnableEXT = PFN_vkCmdSetStencilTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnableEXT" ) );
vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilWriteMask" ) );
vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetDeviceProcAddr( device, "vkCmdSetViewport" ) );
vkCmdSetViewportShadingRatePaletteNV = PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportShadingRatePaletteNV" ) );
vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingNV" ) );
+ vkCmdSetViewportWithCountEXT = PFN_vkCmdSetViewportWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCountEXT" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirectKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetDeviceProcAddr( device, "vkCmdTraceRaysNV" ) );
vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetDeviceProcAddr( device, "vkCmdUpdateBuffer" ) );
vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetDeviceProcAddr( device, "vkCmdWaitEvents" ) );
vkCmdWriteAccelerationStructuresPropertiesNV = PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesNV" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkCmdWriteAccelerationStructuresPropertiesKHR = PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) );
+ if ( !vkCmdWriteAccelerationStructuresPropertiesKHR ) vkCmdWriteAccelerationStructuresPropertiesKHR = vkCmdWriteAccelerationStructuresPropertiesNV;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarkerAMD" ) );
vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp" ) );
- vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetDeviceProcAddr( device, "vkEndCommandBuffer" ) );
- vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetDeviceProcAddr( device, "vkResetCommandBuffer" ) );
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkAcquireFullScreenExclusiveModeEXT" ) );
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetDeviceProcAddr( device, "vkAcquireNextImage2KHR" ) );
- vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetDeviceProcAddr( device, "vkAcquireNextImageKHR" ) );
- vkAcquirePerformanceConfigurationINTEL = PFN_vkAcquirePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkAcquirePerformanceConfigurationINTEL" ) );
- vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetDeviceProcAddr( device, "vkAcquireProfilingLockKHR" ) );
- vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetDeviceProcAddr( device, "vkAllocateCommandBuffers" ) );
- vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetDeviceProcAddr( device, "vkAllocateDescriptorSets" ) );
- vkAllocateMemory = PFN_vkAllocateMemory( vkGetDeviceProcAddr( device, "vkAllocateMemory" ) );
- vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( vkGetDeviceProcAddr( device, "vkBindAccelerationStructureMemoryNV" ) );
- vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetDeviceProcAddr( device, "vkBindBufferMemory" ) );
- vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetDeviceProcAddr( device, "vkBindBufferMemory2" ) );
- vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetDeviceProcAddr( device, "vkBindBufferMemory2KHR" ) );
- vkBindImageMemory = PFN_vkBindImageMemory( vkGetDeviceProcAddr( device, "vkBindImageMemory" ) );
- vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetDeviceProcAddr( device, "vkBindImageMemory2" ) );
- vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetDeviceProcAddr( device, "vkBindImageMemory2KHR" ) );
vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetDeviceProcAddr( device, "vkCompileDeferredNV" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkCopyAccelerationStructureToMemoryKHR = PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureToMemoryKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkCopyMemoryToAccelerationStructureKHR = PFN_vkCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyMemoryToAccelerationStructureKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkCreateAccelerationStructureKHR = PFN_vkCreateAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureNV" ) );
vkCreateBuffer = PFN_vkCreateBuffer( vkGetDeviceProcAddr( device, "vkCreateBuffer" ) );
vkCreateBufferView = PFN_vkCreateBufferView( vkGetDeviceProcAddr( device, "vkCreateBufferView" ) );
vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetDeviceProcAddr( device, "vkCreateCommandPool" ) );
vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetDeviceProcAddr( device, "vkCreateComputePipelines" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkCreateDeferredOperationKHR = PFN_vkCreateDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkCreateDeferredOperationKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetDeviceProcAddr( device, "vkCreateDescriptorPool" ) );
vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkCreateDescriptorSetLayout" ) );
- vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplate" ) );
vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplateKHR" ) );
+ vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplate" ) );
+ if ( !vkCreateDescriptorUpdateTemplate ) vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR;
vkCreateEvent = PFN_vkCreateEvent( vkGetDeviceProcAddr( device, "vkCreateEvent" ) );
vkCreateFence = PFN_vkCreateFence( vkGetDeviceProcAddr( device, "vkCreateFence" ) );
vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetDeviceProcAddr( device, "vkCreateFramebuffer" ) );
vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetDeviceProcAddr( device, "vkCreateGraphicsPipelines" ) );
vkCreateImage = PFN_vkCreateImage( vkGetDeviceProcAddr( device, "vkCreateImage" ) );
vkCreateImageView = PFN_vkCreateImageView( vkGetDeviceProcAddr( device, "vkCreateImageView" ) );
- vkCreateIndirectCommandsLayoutNVX = PFN_vkCreateIndirectCommandsLayoutNVX( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutNVX" ) );
- vkCreateObjectTableNVX = PFN_vkCreateObjectTableNVX( vkGetDeviceProcAddr( device, "vkCreateObjectTableNVX" ) );
+ vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutNV" ) );
vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetDeviceProcAddr( device, "vkCreatePipelineCache" ) );
vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetDeviceProcAddr( device, "vkCreatePipelineLayout" ) );
+ vkCreatePrivateDataSlotEXT = PFN_vkCreatePrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlotEXT" ) );
vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetDeviceProcAddr( device, "vkCreateQueryPool" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkCreateRayTracingPipelinesKHR = PFN_vkCreateRayTracingPipelinesKHR( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesNV" ) );
vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetDeviceProcAddr( device, "vkCreateRenderPass" ) );
- vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetDeviceProcAddr( device, "vkCreateRenderPass2" ) );
vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCreateRenderPass2KHR" ) );
+ vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetDeviceProcAddr( device, "vkCreateRenderPass2" ) );
+ if ( !vkCreateRenderPass2 ) vkCreateRenderPass2 = vkCreateRenderPass2KHR;
vkCreateSampler = PFN_vkCreateSampler( vkGetDeviceProcAddr( device, "vkCreateSampler" ) );
- vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversion" ) );
vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversionKHR" ) );
+ vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversion" ) );
+ if ( !vkCreateSamplerYcbcrConversion ) vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR;
vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetDeviceProcAddr( device, "vkCreateSemaphore" ) );
vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetDeviceProcAddr( device, "vkCreateShaderModule" ) );
vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetDeviceProcAddr( device, "vkCreateSharedSwapchainsKHR" ) );
@@ -71205,66 +91619,101 @@ namespace VULKAN_HPP_NAMESPACE
vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetDeviceProcAddr( device, "vkCreateValidationCacheEXT" ) );
vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectNameEXT" ) );
vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectTagEXT" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetDeviceProcAddr( device, "vkDeferredOperationJoinKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureNV" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkDestroyAccelerationStructureKHR = PFN_vkDestroyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureKHR" ) );
+ if ( !vkDestroyAccelerationStructureKHR ) vkDestroyAccelerationStructureKHR = vkDestroyAccelerationStructureNV;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetDeviceProcAddr( device, "vkDestroyBuffer" ) );
vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetDeviceProcAddr( device, "vkDestroyBufferView" ) );
vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetDeviceProcAddr( device, "vkDestroyCommandPool" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkDestroyDeferredOperationKHR = PFN_vkDestroyDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkDestroyDeferredOperationKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetDeviceProcAddr( device, "vkDestroyDescriptorPool" ) );
vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkDestroyDescriptorSetLayout" ) );
- vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplate" ) );
vkDestroyDescriptorUpdateTemplateKHR = PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplateKHR" ) );
+ vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplate" ) );
+ if ( !vkDestroyDescriptorUpdateTemplate ) vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR;
vkDestroyDevice = PFN_vkDestroyDevice( vkGetDeviceProcAddr( device, "vkDestroyDevice" ) );
vkDestroyEvent = PFN_vkDestroyEvent( vkGetDeviceProcAddr( device, "vkDestroyEvent" ) );
vkDestroyFence = PFN_vkDestroyFence( vkGetDeviceProcAddr( device, "vkDestroyFence" ) );
vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetDeviceProcAddr( device, "vkDestroyFramebuffer" ) );
vkDestroyImage = PFN_vkDestroyImage( vkGetDeviceProcAddr( device, "vkDestroyImage" ) );
vkDestroyImageView = PFN_vkDestroyImageView( vkGetDeviceProcAddr( device, "vkDestroyImageView" ) );
- vkDestroyIndirectCommandsLayoutNVX = PFN_vkDestroyIndirectCommandsLayoutNVX( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutNVX" ) );
- vkDestroyObjectTableNVX = PFN_vkDestroyObjectTableNVX( vkGetDeviceProcAddr( device, "vkDestroyObjectTableNVX" ) );
+ vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutNV" ) );
vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetDeviceProcAddr( device, "vkDestroyPipeline" ) );
vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetDeviceProcAddr( device, "vkDestroyPipelineCache" ) );
vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetDeviceProcAddr( device, "vkDestroyPipelineLayout" ) );
+ vkDestroyPrivateDataSlotEXT = PFN_vkDestroyPrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlotEXT" ) );
vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetDeviceProcAddr( device, "vkDestroyQueryPool" ) );
vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetDeviceProcAddr( device, "vkDestroyRenderPass" ) );
vkDestroySampler = PFN_vkDestroySampler( vkGetDeviceProcAddr( device, "vkDestroySampler" ) );
- vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversion" ) );
vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversionKHR" ) );
+ vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversion" ) );
+ if ( !vkDestroySamplerYcbcrConversion ) vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR;
vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetDeviceProcAddr( device, "vkDestroySemaphore" ) );
vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetDeviceProcAddr( device, "vkDestroyShaderModule" ) );
vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetDeviceProcAddr( device, "vkDestroySwapchainKHR" ) );
vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetDeviceProcAddr( device, "vkDestroyValidationCacheEXT" ) );
vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetDeviceProcAddr( device, "vkDeviceWaitIdle" ) );
vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetDeviceProcAddr( device, "vkDisplayPowerControlEXT" ) );
+ vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetDeviceProcAddr( device, "vkEndCommandBuffer" ) );
vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkFlushMappedMemoryRanges" ) );
vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetDeviceProcAddr( device, "vkFreeCommandBuffers" ) );
vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetDeviceProcAddr( device, "vkFreeDescriptorSets" ) );
vkFreeMemory = PFN_vkFreeMemory( vkGetDeviceProcAddr( device, "vkFreeMemory" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkGetAccelerationStructureDeviceAddressKHR = PFN_vkGetAccelerationStructureDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureDeviceAddressKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureHandleNV" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkGetAccelerationStructureMemoryRequirementsKHR = PFN_vkGetAccelerationStructureMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureMemoryRequirementsKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
vkGetAccelerationStructureMemoryRequirementsNV = PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureMemoryRequirementsNV" ) );
#ifdef VK_USE_PLATFORM_ANDROID_KHR
vkGetAndroidHardwareBufferPropertiesANDROID = PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetDeviceProcAddr( device, "vkGetAndroidHardwareBufferPropertiesANDROID" ) );
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
- vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddress" ) );
vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressEXT" ) );
vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressKHR" ) );
+ vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddress" ) );
+ if ( !vkGetBufferDeviceAddress ) vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR;
+ if ( !vkGetBufferDeviceAddress ) vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT;
vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements" ) );
- vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2" ) );
vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2KHR" ) );
- vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddress" ) );
+ vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2" ) );
+ if ( !vkGetBufferMemoryRequirements2 ) vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR;
vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddressKHR" ) );
+ vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddress" ) );
+ if ( !vkGetBufferOpaqueCaptureAddress ) vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR;
vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsEXT" ) );
- vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupport" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkGetDeferredOperationMaxConcurrencyKHR = PFN_vkGetDeferredOperationMaxConcurrencyKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationMaxConcurrencyKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkGetDeferredOperationResultKHR = PFN_vkGetDeferredOperationResultKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationResultKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupportKHR" ) );
- vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeatures" ) );
+ vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupport" ) );
+ if ( !vkGetDescriptorSetLayoutSupport ) vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkGetDeviceAccelerationStructureCompatibilityKHR = PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( vkGetDeviceProcAddr( device, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
vkGetDeviceGroupPeerMemoryFeaturesKHR = PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) );
+ vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeatures" ) );
+ if ( !vkGetDeviceGroupPeerMemoryFeatures ) vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR;
vkGetDeviceGroupPresentCapabilitiesKHR = PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPresentCapabilitiesKHR" ) );
#ifdef VK_USE_PLATFORM_WIN32_KHR
vkGetDeviceGroupSurfacePresentModes2EXT = PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModes2EXT" ) );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
vkGetDeviceGroupSurfacePresentModesKHR = PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModesKHR" ) );
vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryCommitment" ) );
- vkGetDeviceMemoryOpaqueCaptureAddress = PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddress" ) );
vkGetDeviceMemoryOpaqueCaptureAddressKHR = PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) );
+ vkGetDeviceMemoryOpaqueCaptureAddress = PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddress" ) );
+ if ( !vkGetDeviceMemoryOpaqueCaptureAddress ) vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR;
vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetDeviceProcAddr( device, "vkGetDeviceProcAddr" ) );
vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetDeviceProcAddr( device, "vkGetDeviceQueue" ) );
vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetDeviceProcAddr( device, "vkGetDeviceQueue2" ) );
@@ -71274,14 +91723,18 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_WIN32_KHR
vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetFenceWin32HandleKHR" ) );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ vkGetGeneratedCommandsMemoryRequirementsNV = PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetGeneratedCommandsMemoryRequirementsNV" ) );
vkGetImageDrmFormatModifierPropertiesEXT = PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetImageDrmFormatModifierPropertiesEXT" ) );
vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements" ) );
- vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2" ) );
vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2KHR" ) );
+ vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2" ) );
+ if ( !vkGetImageMemoryRequirements2 ) vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR;
vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements" ) );
- vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2" ) );
vkGetImageSparseMemoryRequirements2KHR = PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2KHR" ) );
+ vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2" ) );
+ if ( !vkGetImageSparseMemoryRequirements2 ) vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR;
vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout" ) );
+ vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetDeviceProcAddr( device, "vkGetImageViewAddressNVX" ) );
vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandleNVX" ) );
#ifdef VK_USE_PLATFORM_ANDROID_KHR
vkGetMemoryAndroidHardwareBufferANDROID = PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetDeviceProcAddr( device, "vkGetMemoryAndroidHardwareBufferANDROID" ) );
@@ -71304,12 +91757,22 @@ namespace VULKAN_HPP_NAMESPACE
vkGetPipelineExecutableInternalRepresentationsKHR = PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableInternalRepresentationsKHR" ) );
vkGetPipelineExecutablePropertiesKHR = PFN_vkGetPipelineExecutablePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutablePropertiesKHR" ) );
vkGetPipelineExecutableStatisticsKHR = PFN_vkGetPipelineExecutableStatisticsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableStatisticsKHR" ) );
+ vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkGetPrivateDataEXT" ) );
vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetDeviceProcAddr( device, "vkGetQueryPoolResults" ) );
+ vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesNV" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkGetRayTracingShaderGroupHandlesKHR = PFN_vkGetRayTracingShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesKHR" ) );
+ if ( !vkGetRayTracingShaderGroupHandlesKHR ) vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetDeviceProcAddr( device, "vkGetRefreshCycleDurationGOOGLE" ) );
vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetDeviceProcAddr( device, "vkGetRenderAreaGranularity" ) );
- vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValue" ) );
vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValueKHR" ) );
+ vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValue" ) );
+ if ( !vkGetSemaphoreCounterValue ) vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR;
vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreFdKHR" ) );
#ifdef VK_USE_PLATFORM_WIN32_KHR
vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreWin32HandleKHR" ) );
@@ -71332,49 +91795,374 @@ namespace VULKAN_HPP_NAMESPACE
vkMapMemory = PFN_vkMapMemory( vkGetDeviceProcAddr( device, "vkMapMemory" ) );
vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetDeviceProcAddr( device, "vkMergePipelineCaches" ) );
vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetDeviceProcAddr( device, "vkMergeValidationCachesEXT" ) );
+ vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueBeginDebugUtilsLabelEXT" ) );
+ vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetDeviceProcAddr( device, "vkQueueBindSparse" ) );
+ vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueEndDebugUtilsLabelEXT" ) );
+ vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueInsertDebugUtilsLabelEXT" ) );
+ vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetDeviceProcAddr( device, "vkQueuePresentKHR" ) );
+ vkQueueSetPerformanceConfigurationINTEL = PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkQueueSetPerformanceConfigurationINTEL" ) );
+ vkQueueSubmit = PFN_vkQueueSubmit( vkGetDeviceProcAddr( device, "vkQueueSubmit" ) );
+ vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetDeviceProcAddr( device, "vkQueueWaitIdle" ) );
vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDeviceEventEXT" ) );
vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDisplayEventEXT" ) );
- vkRegisterObjectsNVX = PFN_vkRegisterObjectsNVX( vkGetDeviceProcAddr( device, "vkRegisterObjectsNVX" ) );
#ifdef VK_USE_PLATFORM_WIN32_KHR
vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkReleaseFullScreenExclusiveModeEXT" ) );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
vkReleasePerformanceConfigurationINTEL = PFN_vkReleasePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkReleasePerformanceConfigurationINTEL" ) );
vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetDeviceProcAddr( device, "vkReleaseProfilingLockKHR" ) );
+ vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetDeviceProcAddr( device, "vkResetCommandBuffer" ) );
vkResetCommandPool = PFN_vkResetCommandPool( vkGetDeviceProcAddr( device, "vkResetCommandPool" ) );
vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetDeviceProcAddr( device, "vkResetDescriptorPool" ) );
vkResetEvent = PFN_vkResetEvent( vkGetDeviceProcAddr( device, "vkResetEvent" ) );
vkResetFences = PFN_vkResetFences( vkGetDeviceProcAddr( device, "vkResetFences" ) );
- vkResetQueryPool = PFN_vkResetQueryPool( vkGetDeviceProcAddr( device, "vkResetQueryPool" ) );
vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetDeviceProcAddr( device, "vkResetQueryPoolEXT" ) );
+ vkResetQueryPool = PFN_vkResetQueryPool( vkGetDeviceProcAddr( device, "vkResetQueryPool" ) );
+ if ( !vkResetQueryPool ) vkResetQueryPool = vkResetQueryPoolEXT;
vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectNameEXT" ) );
vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectTagEXT" ) );
vkSetEvent = PFN_vkSetEvent( vkGetDeviceProcAddr( device, "vkSetEvent" ) );
vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetDeviceProcAddr( device, "vkSetHdrMetadataEXT" ) );
vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetDeviceProcAddr( device, "vkSetLocalDimmingAMD" ) );
- vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetDeviceProcAddr( device, "vkSignalSemaphore" ) );
+ vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkSetPrivateDataEXT" ) );
vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetDeviceProcAddr( device, "vkSignalSemaphoreKHR" ) );
- vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetDeviceProcAddr( device, "vkTrimCommandPool" ) );
+ vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetDeviceProcAddr( device, "vkSignalSemaphore" ) );
+ if ( !vkSignalSemaphore ) vkSignalSemaphore = vkSignalSemaphoreKHR;
vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetDeviceProcAddr( device, "vkTrimCommandPoolKHR" ) );
+ vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetDeviceProcAddr( device, "vkTrimCommandPool" ) );
+ if ( !vkTrimCommandPool ) vkTrimCommandPool = vkTrimCommandPoolKHR;
vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkUninitializePerformanceApiINTEL" ) );
vkUnmapMemory = PFN_vkUnmapMemory( vkGetDeviceProcAddr( device, "vkUnmapMemory" ) );
- vkUnregisterObjectsNVX = PFN_vkUnregisterObjectsNVX( vkGetDeviceProcAddr( device, "vkUnregisterObjectsNVX" ) );
- vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplate" ) );
vkUpdateDescriptorSetWithTemplateKHR = PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplateKHR" ) );
+ vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplate" ) );
+ if ( !vkUpdateDescriptorSetWithTemplate ) vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR;
vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSets" ) );
vkWaitForFences = PFN_vkWaitForFences( vkGetDeviceProcAddr( device, "vkWaitForFences" ) );
- vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetDeviceProcAddr( device, "vkWaitSemaphores" ) );
vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetDeviceProcAddr( device, "vkWaitSemaphoresKHR" ) );
- vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) );
- vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueBeginDebugUtilsLabelEXT" ) );
- vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetDeviceProcAddr( device, "vkQueueBindSparse" ) );
- vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueEndDebugUtilsLabelEXT" ) );
- vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueInsertDebugUtilsLabelEXT" ) );
- vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetDeviceProcAddr( device, "vkQueuePresentKHR" ) );
- vkQueueSetPerformanceConfigurationINTEL = PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkQueueSetPerformanceConfigurationINTEL" ) );
- vkQueueSubmit = PFN_vkQueueSubmit( vkGetDeviceProcAddr( device, "vkQueueSubmit" ) );
- vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetDeviceProcAddr( device, "vkQueueWaitIdle" ) );
+ vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetDeviceProcAddr( device, "vkWaitSemaphores" ) );
+ if ( !vkWaitSemaphores ) vkWaitSemaphores = vkWaitSemaphoresKHR;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ vkWriteAccelerationStructuresPropertiesKHR = PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkWriteAccelerationStructuresPropertiesKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
}
};
} // namespace VULKAN_HPP_NAMESPACE
+
+namespace std
+{
+
+ template <> struct hash<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR>
+ {
+ std::size_t operator()(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR const& accelerationStructureKHR) const VULKAN_HPP_NOEXCEPT
+ {
+ return std::hash<VkAccelerationStructureKHR>{}(static_cast<VkAccelerationStructureKHR>(accelerationStructureKHR));
+ }
+ };
+
+ template <> struct hash<VULKAN_HPP_NAMESPACE::Buffer>
+ {
+ std::size_t operator()(VULKAN_HPP_NAMESPACE::Buffer const& buffer) const VULKAN_HPP_NOEXCEPT
+ {
+ return std::hash<VkBuffer>{}(static_cast<VkBuffer>(buffer));
+ }
+ };
+
+ template <> struct hash<VULKAN_HPP_NAMESPACE::BufferView>
+ {
+ std::size_t operator()(VULKAN_HPP_NAMESPACE::BufferView const& bufferView) const VULKAN_HPP_NOEXCEPT
+ {
+ return std::hash<VkBufferView>{}(static_cast<VkBufferView>(bufferView));
+ }
+ };
+
+ template <> struct hash<VULKAN_HPP_NAMESPACE::CommandBuffer>
+ {
+ std::size_t operator()(VULKAN_HPP_NAMESPACE::CommandBuffer const& commandBuffer) const VULKAN_HPP_NOEXCEPT
+ {
+ return std::hash<VkCommandBuffer>{}(static_cast<VkCommandBuffer>(commandBuffer));
+ }
+ };
+
+ template <> struct hash<VULKAN_HPP_NAMESPACE::CommandPool>
+ {
+ std::size_t operator()(VULKAN_HPP_NAMESPACE::CommandPool const& commandPool) const VULKAN_HPP_NOEXCEPT
+ {
+ return std::hash<VkCommandPool>{}(static_cast<VkCommandPool>(commandPool));
+ }
+ };
+
+ template <> struct hash<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>
+ {
+ std::size_t operator()(VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT const& debugReportCallbackEXT) const VULKAN_HPP_NOEXCEPT
+ {
+ return std::hash<VkDebugReportCallbackEXT>{}(static_cast<VkDebugReportCallbackEXT>(debugReportCallbackEXT));
+ }
+ };
+
+ template <> struct hash<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>
+ {
+ std::size_t operator()(VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT const& debugUtilsMessengerEXT) const VULKAN_HPP_NOEXCEPT
+ {
+ return std::hash<VkDebugUtilsMessengerEXT>{}(static_cast<VkDebugUtilsMessengerEXT>(debugUtilsMessengerEXT));
+ }
+ };
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ template <> struct hash<VULKAN_HPP_NAMESPACE::DeferredOperationKHR>
+ {
+ std::size_t operator()(VULKAN_HPP_NAMESPACE::DeferredOperationKHR const& deferredOperationKHR) const VULKAN_HPP_NOEXCEPT
+ {
+ return std::hash<VkDeferredOperationKHR>{}(static_cast<VkDeferredOperationKHR>(deferredOperationKHR));
+ }
+ };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+ template <> struct hash<VULKAN_HPP_NAMESPACE::DescriptorPool>
+ {
+ std::size_t operator()(VULKAN_HPP_NAMESPACE::DescriptorPool const& descriptorPool) const VULKAN_HPP_NOEXCEPT
+ {
+ return std::hash<VkDescriptorPool>{}(static_cast<VkDescriptorPool>(descriptorPool));
+ }
+ };
+
+ template <> struct hash<VULKAN_HPP_NAMESPACE::DescriptorSet>
+ {
+ std::size_t operator()(VULKAN_HPP_NAMESPACE::DescriptorSet const& descriptorSet) const VULKAN_HPP_NOEXCEPT
+ {
+ return std::hash<VkDescriptorSet>{}(static_cast<VkDescriptorSet>(descriptorSet));
+ }
+ };
+
+ template <> struct hash<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>
+ {
+ std::size_t operator()(VULKAN_HPP_NAMESPACE::DescriptorSetLayout const& descriptorSetLayout) const VULKAN_HPP_NOEXCEPT
+ {
+ return std::hash<VkDescriptorSetLayout>{}(static_cast<VkDescriptorSetLayout>(descriptorSetLayout));
+ }
+ };
+
+ template <> struct hash<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>
+ {
+ std::size_t operator()(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate const& descriptorUpdateTemplate) const VULKAN_HPP_NOEXCEPT
+ {
+ return std::hash<VkDescriptorUpdateTemplate>{}(static_cast<VkDescriptorUpdateTemplate>(descriptorUpdateTemplate));
+ }
+ };
+
+ template <> struct hash<VULKAN_HPP_NAMESPACE::Device>
+ {
+ std::size_t operator()(VULKAN_HPP_NAMESPACE::Device const& device) const VULKAN_HPP_NOEXCEPT
+ {
+ return std::hash<VkDevice>{}(static_cast<VkDevice>(device));
+ }
+ };
+
+ template <> struct hash<VULKAN_HPP_NAMESPACE::DeviceMemory>
+ {
+ std::size_t operator()(VULKAN_HPP_NAMESPACE::DeviceMemory const& deviceMemory) const VULKAN_HPP_NOEXCEPT
+ {
+ return std::hash<VkDeviceMemory>{}(static_cast<VkDeviceMemory>(deviceMemory));
+ }
+ };
+
+ template <> struct hash<VULKAN_HPP_NAMESPACE::DisplayKHR>
+ {
+ std::size_t operator()(VULKAN_HPP_NAMESPACE::DisplayKHR const& displayKHR) const VULKAN_HPP_NOEXCEPT
+ {
+ return std::hash<VkDisplayKHR>{}(static_cast<VkDisplayKHR>(displayKHR));
+ }
+ };
+
+ template <> struct hash<VULKAN_HPP_NAMESPACE::DisplayModeKHR>
+ {
+ std::size_t operator()(VULKAN_HPP_NAMESPACE::DisplayModeKHR const& displayModeKHR) const VULKAN_HPP_NOEXCEPT
+ {
+ return std::hash<VkDisplayModeKHR>{}(static_cast<VkDisplayModeKHR>(displayModeKHR));
+ }
+ };
+
+ template <> struct hash<VULKAN_HPP_NAMESPACE::Event>
+ {
+ std::size_t operator()(VULKAN_HPP_NAMESPACE::Event const& event) const VULKAN_HPP_NOEXCEPT
+ {
+ return std::hash<VkEvent>{}(static_cast<VkEvent>(event));
+ }
+ };
+
+ template <> struct hash<VULKAN_HPP_NAMESPACE::Fence>
+ {
+ std::size_t operator()(VULKAN_HPP_NAMESPACE::Fence const& fence) const VULKAN_HPP_NOEXCEPT
+ {
+ return std::hash<VkFence>{}(static_cast<VkFence>(fence));
+ }
+ };
+
+ template <> struct hash<VULKAN_HPP_NAMESPACE::Framebuffer>
+ {
+ std::size_t operator()(VULKAN_HPP_NAMESPACE::Framebuffer const& framebuffer) const VULKAN_HPP_NOEXCEPT
+ {
+ return std::hash<VkFramebuffer>{}(static_cast<VkFramebuffer>(framebuffer));
+ }
+ };
+
+ template <> struct hash<VULKAN_HPP_NAMESPACE::Image>
+ {
+ std::size_t operator()(VULKAN_HPP_NAMESPACE::Image const& image) const VULKAN_HPP_NOEXCEPT
+ {
+ return std::hash<VkImage>{}(static_cast<VkImage>(image));
+ }
+ };
+
+ template <> struct hash<VULKAN_HPP_NAMESPACE::ImageView>
+ {
+ std::size_t operator()(VULKAN_HPP_NAMESPACE::ImageView const& imageView) const VULKAN_HPP_NOEXCEPT
+ {
+ return std::hash<VkImageView>{}(static_cast<VkImageView>(imageView));
+ }
+ };
+
+ template <> struct hash<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV>
+ {
+ std::size_t operator()(VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV const& indirectCommandsLayoutNV) const VULKAN_HPP_NOEXCEPT
+ {
+ return std::hash<VkIndirectCommandsLayoutNV>{}(static_cast<VkIndirectCommandsLayoutNV>(indirectCommandsLayoutNV));
+ }
+ };
+
+ template <> struct hash<VULKAN_HPP_NAMESPACE::Instance>
+ {
+ std::size_t operator()(VULKAN_HPP_NAMESPACE::Instance const& instance) const VULKAN_HPP_NOEXCEPT
+ {
+ return std::hash<VkInstance>{}(static_cast<VkInstance>(instance));
+ }
+ };
+
+ template <> struct hash<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>
+ {
+ std::size_t operator()(VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL const& performanceConfigurationINTEL) const VULKAN_HPP_NOEXCEPT
+ {
+ return std::hash<VkPerformanceConfigurationINTEL>{}(static_cast<VkPerformanceConfigurationINTEL>(performanceConfigurationINTEL));
+ }
+ };
+
+ template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevice>
+ {
+ std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDevice const& physicalDevice) const VULKAN_HPP_NOEXCEPT
+ {
+ return std::hash<VkPhysicalDevice>{}(static_cast<VkPhysicalDevice>(physicalDevice));
+ }
+ };
+
+ template <> struct hash<VULKAN_HPP_NAMESPACE::Pipeline>
+ {
+ std::size_t operator()(VULKAN_HPP_NAMESPACE::Pipeline const& pipeline) const VULKAN_HPP_NOEXCEPT
+ {
+ return std::hash<VkPipeline>{}(static_cast<VkPipeline>(pipeline));
+ }
+ };
+
+ template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineCache>
+ {
+ std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineCache const& pipelineCache) const VULKAN_HPP_NOEXCEPT
+ {
+ return std::hash<VkPipelineCache>{}(static_cast<VkPipelineCache>(pipelineCache));
+ }
+ };
+
+ template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineLayout>
+ {
+ std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineLayout const& pipelineLayout) const VULKAN_HPP_NOEXCEPT
+ {
+ return std::hash<VkPipelineLayout>{}(static_cast<VkPipelineLayout>(pipelineLayout));
+ }
+ };
+
+ template <> struct hash<VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT>
+ {
+ std::size_t operator()(VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT const& privateDataSlotEXT) const VULKAN_HPP_NOEXCEPT
+ {
+ return std::hash<VkPrivateDataSlotEXT>{}(static_cast<VkPrivateDataSlotEXT>(privateDataSlotEXT));
+ }
+ };
+
+ template <> struct hash<VULKAN_HPP_NAMESPACE::QueryPool>
+ {
+ std::size_t operator()(VULKAN_HPP_NAMESPACE::QueryPool const& queryPool) const VULKAN_HPP_NOEXCEPT
+ {
+ return std::hash<VkQueryPool>{}(static_cast<VkQueryPool>(queryPool));
+ }
+ };
+
+ template <> struct hash<VULKAN_HPP_NAMESPACE::Queue>
+ {
+ std::size_t operator()(VULKAN_HPP_NAMESPACE::Queue const& queue) const VULKAN_HPP_NOEXCEPT
+ {
+ return std::hash<VkQueue>{}(static_cast<VkQueue>(queue));
+ }
+ };
+
+ template <> struct hash<VULKAN_HPP_NAMESPACE::RenderPass>
+ {
+ std::size_t operator()(VULKAN_HPP_NAMESPACE::RenderPass const& renderPass) const VULKAN_HPP_NOEXCEPT
+ {
+ return std::hash<VkRenderPass>{}(static_cast<VkRenderPass>(renderPass));
+ }
+ };
+
+ template <> struct hash<VULKAN_HPP_NAMESPACE::Sampler>
+ {
+ std::size_t operator()(VULKAN_HPP_NAMESPACE::Sampler const& sampler) const VULKAN_HPP_NOEXCEPT
+ {
+ return std::hash<VkSampler>{}(static_cast<VkSampler>(sampler));
+ }
+ };
+
+ template <> struct hash<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>
+ {
+ std::size_t operator()(VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion const& samplerYcbcrConversion) const VULKAN_HPP_NOEXCEPT
+ {
+ return std::hash<VkSamplerYcbcrConversion>{}(static_cast<VkSamplerYcbcrConversion>(samplerYcbcrConversion));
+ }
+ };
+
+ template <> struct hash<VULKAN_HPP_NAMESPACE::Semaphore>
+ {
+ std::size_t operator()(VULKAN_HPP_NAMESPACE::Semaphore const& semaphore) const VULKAN_HPP_NOEXCEPT
+ {
+ return std::hash<VkSemaphore>{}(static_cast<VkSemaphore>(semaphore));
+ }
+ };
+
+ template <> struct hash<VULKAN_HPP_NAMESPACE::ShaderModule>
+ {
+ std::size_t operator()(VULKAN_HPP_NAMESPACE::ShaderModule const& shaderModule) const VULKAN_HPP_NOEXCEPT
+ {
+ return std::hash<VkShaderModule>{}(static_cast<VkShaderModule>(shaderModule));
+ }
+ };
+
+ template <> struct hash<VULKAN_HPP_NAMESPACE::SurfaceKHR>
+ {
+ std::size_t operator()(VULKAN_HPP_NAMESPACE::SurfaceKHR const& surfaceKHR) const VULKAN_HPP_NOEXCEPT
+ {
+ return std::hash<VkSurfaceKHR>{}(static_cast<VkSurfaceKHR>(surfaceKHR));
+ }
+ };
+
+ template <> struct hash<VULKAN_HPP_NAMESPACE::SwapchainKHR>
+ {
+ std::size_t operator()(VULKAN_HPP_NAMESPACE::SwapchainKHR const& swapchainKHR) const VULKAN_HPP_NOEXCEPT
+ {
+ return std::hash<VkSwapchainKHR>{}(static_cast<VkSwapchainKHR>(swapchainKHR));
+ }
+ };
+
+ template <> struct hash<VULKAN_HPP_NAMESPACE::ValidationCacheEXT>
+ {
+ std::size_t operator()(VULKAN_HPP_NAMESPACE::ValidationCacheEXT const& validationCacheEXT) const VULKAN_HPP_NOEXCEPT
+ {
+ return std::hash<VkValidationCacheEXT>{}(static_cast<VkValidationCacheEXT>(validationCacheEXT));
+ }
+ };
+}
#endif
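
The std::hash specializations added above make the vulkan.hpp handle wrappers directly usable as keys in the standard unordered containers. A minimal sketch of the kind of code they enable, assuming vulkan.hpp is included and the map and function names are placeholders:

#include <cstdint>
#include <unordered_map>
#include <vulkan/vulkan.hpp>

// Hypothetical per-frame bookkeeping keyed directly on a vk::ImageView handle;
// without the std::hash specialization above this map would not compile.
std::unordered_map<vk::ImageView, uint64_t> lastUseFrame;

void markUsed(vk::ImageView view, uint64_t frame)
{
    lastUseFrame[view] = frame;   // hashes the underlying VkImageView value
}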
diff --git a/thirdparty/vulkan/include/vulkan/vulkan_android.h b/thirdparty/vulkan/include/vulkan/vulkan_android.h
index 9b8d3e276f..50ef85f13e 100644
--- a/thirdparty/vulkan/include/vulkan/vulkan_android.h
+++ b/thirdparty/vulkan/include/vulkan/vulkan_android.h
@@ -2,19 +2,9 @@
#define VULKAN_ANDROID_H_ 1
/*
-** Copyright (c) 2015-2019 The Khronos Group Inc.
+** Copyright (c) 2015-2020 The Khronos Group Inc.
**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-** http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
+** SPDX-License-Identifier: Apache-2.0
*/
/*
diff --git a/thirdparty/vulkan/include/vulkan/vulkan_beta.h b/thirdparty/vulkan/include/vulkan/vulkan_beta.h
new file mode 100644
index 0000000000..4b7f2b2993
--- /dev/null
+++ b/thirdparty/vulkan/include/vulkan/vulkan_beta.h
@@ -0,0 +1,459 @@
+#ifndef VULKAN_BETA_H_
+#define VULKAN_BETA_H_ 1
+
+/*
+** Copyright (c) 2015-2020 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#define VK_KHR_portability_subset 1
+#define VK_KHR_PORTABILITY_SUBSET_SPEC_VERSION 1
+#define VK_KHR_PORTABILITY_SUBSET_EXTENSION_NAME "VK_KHR_portability_subset"
+typedef struct VkPhysicalDevicePortabilitySubsetFeaturesKHR {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 constantAlphaColorBlendFactors;
+ VkBool32 events;
+ VkBool32 imageViewFormatReinterpretation;
+ VkBool32 imageViewFormatSwizzle;
+ VkBool32 imageView2DOn3DImage;
+ VkBool32 multisampleArrayImage;
+ VkBool32 mutableComparisonSamplers;
+ VkBool32 pointPolygons;
+ VkBool32 samplerMipLodBias;
+ VkBool32 separateStencilMaskRef;
+ VkBool32 shaderSampleRateInterpolationFunctions;
+ VkBool32 tessellationIsolines;
+ VkBool32 tessellationPointMode;
+ VkBool32 triangleFans;
+ VkBool32 vertexAttributeAccessBeyondStride;
+} VkPhysicalDevicePortabilitySubsetFeaturesKHR;
+
+typedef struct VkPhysicalDevicePortabilitySubsetPropertiesKHR {
+ VkStructureType sType;
+ void* pNext;
+ uint32_t minVertexInputBindingStrideAlignment;
+} VkPhysicalDevicePortabilitySubsetPropertiesKHR;
+
+
+
+#define VK_KHR_deferred_host_operations 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDeferredOperationKHR)
+#define VK_KHR_DEFERRED_HOST_OPERATIONS_SPEC_VERSION 3
+#define VK_KHR_DEFERRED_HOST_OPERATIONS_EXTENSION_NAME "VK_KHR_deferred_host_operations"
+typedef struct VkDeferredOperationInfoKHR {
+ VkStructureType sType;
+ const void* pNext;
+ VkDeferredOperationKHR operationHandle;
+} VkDeferredOperationInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateDeferredOperationKHR)(VkDevice device, const VkAllocationCallbacks* pAllocator, VkDeferredOperationKHR* pDeferredOperation);
+typedef void (VKAPI_PTR *PFN_vkDestroyDeferredOperationKHR)(VkDevice device, VkDeferredOperationKHR operation, const VkAllocationCallbacks* pAllocator);
+typedef uint32_t (VKAPI_PTR *PFN_vkGetDeferredOperationMaxConcurrencyKHR)(VkDevice device, VkDeferredOperationKHR operation);
+typedef VkResult (VKAPI_PTR *PFN_vkGetDeferredOperationResultKHR)(VkDevice device, VkDeferredOperationKHR operation);
+typedef VkResult (VKAPI_PTR *PFN_vkDeferredOperationJoinKHR)(VkDevice device, VkDeferredOperationKHR operation);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateDeferredOperationKHR(
+ VkDevice device,
+ const VkAllocationCallbacks* pAllocator,
+ VkDeferredOperationKHR* pDeferredOperation);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyDeferredOperationKHR(
+ VkDevice device,
+ VkDeferredOperationKHR operation,
+ const VkAllocationCallbacks* pAllocator);
+
+VKAPI_ATTR uint32_t VKAPI_CALL vkGetDeferredOperationMaxConcurrencyKHR(
+ VkDevice device,
+ VkDeferredOperationKHR operation);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetDeferredOperationResultKHR(
+ VkDevice device,
+ VkDeferredOperationKHR operation);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkDeferredOperationJoinKHR(
+ VkDevice device,
+ VkDeferredOperationKHR operation);
+#endif
+
+
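
The deferred host operations entry points declared above follow a create / join / query-result / destroy lifecycle. A hedged single-threaded sketch (assuming 'device' is a valid VkDevice with the extension enabled, and that some deferrable command is issued with the operation attached via VkDeferredOperationInfoKHR; error handling omitted):

VkDeferredOperationKHR op = VK_NULL_HANDLE;
vkCreateDeferredOperationKHR(device, nullptr, &op);

// ... issue a deferrable command here, chaining VkDeferredOperationInfoKHR with 'op' ...

VkResult join;
do {
    join = vkDeferredOperationJoinKHR(device, op);   // this thread contributes work
} while (join == VK_THREAD_IDLE_KHR);                // SUCCESS / THREAD_DONE end the loop

VkResult status = vkGetDeferredOperationResultKHR(device, op);  // final outcome
vkDestroyDeferredOperationKHR(device, op, nullptr);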
+#define VK_KHR_pipeline_library 1
+#define VK_KHR_PIPELINE_LIBRARY_SPEC_VERSION 1
+#define VK_KHR_PIPELINE_LIBRARY_EXTENSION_NAME "VK_KHR_pipeline_library"
+typedef struct VkPipelineLibraryCreateInfoKHR {
+ VkStructureType sType;
+ const void* pNext;
+ uint32_t libraryCount;
+ const VkPipeline* pLibraries;
+} VkPipelineLibraryCreateInfoKHR;
+
+
+
+#define VK_KHR_ray_tracing 1
+#define VK_KHR_RAY_TRACING_SPEC_VERSION 8
+#define VK_KHR_RAY_TRACING_EXTENSION_NAME "VK_KHR_ray_tracing"
+
+typedef enum VkAccelerationStructureBuildTypeKHR {
+ VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_KHR = 0,
+ VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR = 1,
+ VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_OR_DEVICE_KHR = 2,
+ VK_ACCELERATION_STRUCTURE_BUILD_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkAccelerationStructureBuildTypeKHR;
+typedef union VkDeviceOrHostAddressKHR {
+ VkDeviceAddress deviceAddress;
+ void* hostAddress;
+} VkDeviceOrHostAddressKHR;
+
+typedef union VkDeviceOrHostAddressConstKHR {
+ VkDeviceAddress deviceAddress;
+ const void* hostAddress;
+} VkDeviceOrHostAddressConstKHR;
+
+typedef struct VkAccelerationStructureBuildOffsetInfoKHR {
+ uint32_t primitiveCount;
+ uint32_t primitiveOffset;
+ uint32_t firstVertex;
+ uint32_t transformOffset;
+} VkAccelerationStructureBuildOffsetInfoKHR;
+
+typedef struct VkRayTracingShaderGroupCreateInfoKHR {
+ VkStructureType sType;
+ const void* pNext;
+ VkRayTracingShaderGroupTypeKHR type;
+ uint32_t generalShader;
+ uint32_t closestHitShader;
+ uint32_t anyHitShader;
+ uint32_t intersectionShader;
+ const void* pShaderGroupCaptureReplayHandle;
+} VkRayTracingShaderGroupCreateInfoKHR;
+
+typedef struct VkRayTracingPipelineInterfaceCreateInfoKHR {
+ VkStructureType sType;
+ const void* pNext;
+ uint32_t maxPayloadSize;
+ uint32_t maxAttributeSize;
+ uint32_t maxCallableSize;
+} VkRayTracingPipelineInterfaceCreateInfoKHR;
+
+typedef struct VkRayTracingPipelineCreateInfoKHR {
+ VkStructureType sType;
+ const void* pNext;
+ VkPipelineCreateFlags flags;
+ uint32_t stageCount;
+ const VkPipelineShaderStageCreateInfo* pStages;
+ uint32_t groupCount;
+ const VkRayTracingShaderGroupCreateInfoKHR* pGroups;
+ uint32_t maxRecursionDepth;
+ VkPipelineLibraryCreateInfoKHR libraries;
+ const VkRayTracingPipelineInterfaceCreateInfoKHR* pLibraryInterface;
+ VkPipelineLayout layout;
+ VkPipeline basePipelineHandle;
+ int32_t basePipelineIndex;
+} VkRayTracingPipelineCreateInfoKHR;
+
+typedef struct VkAccelerationStructureGeometryTrianglesDataKHR {
+ VkStructureType sType;
+ const void* pNext;
+ VkFormat vertexFormat;
+ VkDeviceOrHostAddressConstKHR vertexData;
+ VkDeviceSize vertexStride;
+ VkIndexType indexType;
+ VkDeviceOrHostAddressConstKHR indexData;
+ VkDeviceOrHostAddressConstKHR transformData;
+} VkAccelerationStructureGeometryTrianglesDataKHR;
+
+typedef struct VkAccelerationStructureGeometryAabbsDataKHR {
+ VkStructureType sType;
+ const void* pNext;
+ VkDeviceOrHostAddressConstKHR data;
+ VkDeviceSize stride;
+} VkAccelerationStructureGeometryAabbsDataKHR;
+
+typedef struct VkAccelerationStructureGeometryInstancesDataKHR {
+ VkStructureType sType;
+ const void* pNext;
+ VkBool32 arrayOfPointers;
+ VkDeviceOrHostAddressConstKHR data;
+} VkAccelerationStructureGeometryInstancesDataKHR;
+
+typedef union VkAccelerationStructureGeometryDataKHR {
+ VkAccelerationStructureGeometryTrianglesDataKHR triangles;
+ VkAccelerationStructureGeometryAabbsDataKHR aabbs;
+ VkAccelerationStructureGeometryInstancesDataKHR instances;
+} VkAccelerationStructureGeometryDataKHR;
+
+typedef struct VkAccelerationStructureGeometryKHR {
+ VkStructureType sType;
+ const void* pNext;
+ VkGeometryTypeKHR geometryType;
+ VkAccelerationStructureGeometryDataKHR geometry;
+ VkGeometryFlagsKHR flags;
+} VkAccelerationStructureGeometryKHR;
+
+typedef struct VkAccelerationStructureBuildGeometryInfoKHR {
+ VkStructureType sType;
+ const void* pNext;
+ VkAccelerationStructureTypeKHR type;
+ VkBuildAccelerationStructureFlagsKHR flags;
+ VkBool32 update;
+ VkAccelerationStructureKHR srcAccelerationStructure;
+ VkAccelerationStructureKHR dstAccelerationStructure;
+ VkBool32 geometryArrayOfPointers;
+ uint32_t geometryCount;
+ const VkAccelerationStructureGeometryKHR* const* ppGeometries;
+ VkDeviceOrHostAddressKHR scratchData;
+} VkAccelerationStructureBuildGeometryInfoKHR;
+
+typedef struct VkAccelerationStructureCreateGeometryTypeInfoKHR {
+ VkStructureType sType;
+ const void* pNext;
+ VkGeometryTypeKHR geometryType;
+ uint32_t maxPrimitiveCount;
+ VkIndexType indexType;
+ uint32_t maxVertexCount;
+ VkFormat vertexFormat;
+ VkBool32 allowsTransforms;
+} VkAccelerationStructureCreateGeometryTypeInfoKHR;
+
+typedef struct VkAccelerationStructureCreateInfoKHR {
+ VkStructureType sType;
+ const void* pNext;
+ VkDeviceSize compactedSize;
+ VkAccelerationStructureTypeKHR type;
+ VkBuildAccelerationStructureFlagsKHR flags;
+ uint32_t maxGeometryCount;
+ const VkAccelerationStructureCreateGeometryTypeInfoKHR* pGeometryInfos;
+ VkDeviceAddress deviceAddress;
+} VkAccelerationStructureCreateInfoKHR;
+
+typedef struct VkAccelerationStructureMemoryRequirementsInfoKHR {
+ VkStructureType sType;
+ const void* pNext;
+ VkAccelerationStructureMemoryRequirementsTypeKHR type;
+ VkAccelerationStructureBuildTypeKHR buildType;
+ VkAccelerationStructureKHR accelerationStructure;
+} VkAccelerationStructureMemoryRequirementsInfoKHR;
+
+typedef struct VkPhysicalDeviceRayTracingFeaturesKHR {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 rayTracing;
+ VkBool32 rayTracingShaderGroupHandleCaptureReplay;
+ VkBool32 rayTracingShaderGroupHandleCaptureReplayMixed;
+ VkBool32 rayTracingAccelerationStructureCaptureReplay;
+ VkBool32 rayTracingIndirectTraceRays;
+ VkBool32 rayTracingIndirectAccelerationStructureBuild;
+ VkBool32 rayTracingHostAccelerationStructureCommands;
+ VkBool32 rayQuery;
+ VkBool32 rayTracingPrimitiveCulling;
+} VkPhysicalDeviceRayTracingFeaturesKHR;
+
+typedef struct VkPhysicalDeviceRayTracingPropertiesKHR {
+ VkStructureType sType;
+ void* pNext;
+ uint32_t shaderGroupHandleSize;
+ uint32_t maxRecursionDepth;
+ uint32_t maxShaderGroupStride;
+ uint32_t shaderGroupBaseAlignment;
+ uint64_t maxGeometryCount;
+ uint64_t maxInstanceCount;
+ uint64_t maxPrimitiveCount;
+ uint32_t maxDescriptorSetAccelerationStructures;
+ uint32_t shaderGroupHandleCaptureReplaySize;
+} VkPhysicalDeviceRayTracingPropertiesKHR;
+
+typedef struct VkAccelerationStructureDeviceAddressInfoKHR {
+ VkStructureType sType;
+ const void* pNext;
+ VkAccelerationStructureKHR accelerationStructure;
+} VkAccelerationStructureDeviceAddressInfoKHR;
+
+typedef struct VkAccelerationStructureVersionKHR {
+ VkStructureType sType;
+ const void* pNext;
+ const uint8_t* versionData;
+} VkAccelerationStructureVersionKHR;
+
+typedef struct VkStridedBufferRegionKHR {
+ VkBuffer buffer;
+ VkDeviceSize offset;
+ VkDeviceSize stride;
+ VkDeviceSize size;
+} VkStridedBufferRegionKHR;
+
+typedef struct VkTraceRaysIndirectCommandKHR {
+ uint32_t width;
+ uint32_t height;
+ uint32_t depth;
+} VkTraceRaysIndirectCommandKHR;
+
+typedef struct VkCopyAccelerationStructureToMemoryInfoKHR {
+ VkStructureType sType;
+ const void* pNext;
+ VkAccelerationStructureKHR src;
+ VkDeviceOrHostAddressKHR dst;
+ VkCopyAccelerationStructureModeKHR mode;
+} VkCopyAccelerationStructureToMemoryInfoKHR;
+
+typedef struct VkCopyMemoryToAccelerationStructureInfoKHR {
+ VkStructureType sType;
+ const void* pNext;
+ VkDeviceOrHostAddressConstKHR src;
+ VkAccelerationStructureKHR dst;
+ VkCopyAccelerationStructureModeKHR mode;
+} VkCopyMemoryToAccelerationStructureInfoKHR;
+
+typedef struct VkCopyAccelerationStructureInfoKHR {
+ VkStructureType sType;
+ const void* pNext;
+ VkAccelerationStructureKHR src;
+ VkAccelerationStructureKHR dst;
+ VkCopyAccelerationStructureModeKHR mode;
+} VkCopyAccelerationStructureInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateAccelerationStructureKHR)(VkDevice device, const VkAccelerationStructureCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureKHR* pAccelerationStructure);
+typedef void (VKAPI_PTR *PFN_vkGetAccelerationStructureMemoryRequirementsKHR)(VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoKHR* pInfo, VkMemoryRequirements2* pMemoryRequirements);
+typedef void (VKAPI_PTR *PFN_vkCmdBuildAccelerationStructureKHR)(VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkAccelerationStructureBuildOffsetInfoKHR* const* ppOffsetInfos);
+typedef void (VKAPI_PTR *PFN_vkCmdBuildAccelerationStructureIndirectKHR)(VkCommandBuffer commandBuffer, const VkAccelerationStructureBuildGeometryInfoKHR* pInfo, VkBuffer indirectBuffer, VkDeviceSize indirectOffset, uint32_t indirectStride);
+typedef VkResult (VKAPI_PTR *PFN_vkBuildAccelerationStructureKHR)(VkDevice device, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkAccelerationStructureBuildOffsetInfoKHR* const* ppOffsetInfos);
+typedef VkResult (VKAPI_PTR *PFN_vkCopyAccelerationStructureKHR)(VkDevice device, const VkCopyAccelerationStructureInfoKHR* pInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkCopyAccelerationStructureToMemoryKHR)(VkDevice device, const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkCopyMemoryToAccelerationStructureKHR)(VkDevice device, const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkWriteAccelerationStructuresPropertiesKHR)(VkDevice device, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, size_t dataSize, void* pData, size_t stride);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyAccelerationStructureKHR)(VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureInfoKHR* pInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyAccelerationStructureToMemoryKHR)(VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyMemoryToAccelerationStructureKHR)(VkCommandBuffer commandBuffer, const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdTraceRaysKHR)(VkCommandBuffer commandBuffer, const VkStridedBufferRegionKHR* pRaygenShaderBindingTable, const VkStridedBufferRegionKHR* pMissShaderBindingTable, const VkStridedBufferRegionKHR* pHitShaderBindingTable, const VkStridedBufferRegionKHR* pCallableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateRayTracingPipelinesKHR)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines);
+typedef VkDeviceAddress (VKAPI_PTR *PFN_vkGetAccelerationStructureDeviceAddressKHR)(VkDevice device, const VkAccelerationStructureDeviceAddressInfoKHR* pInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR)(VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData);
+typedef void (VKAPI_PTR *PFN_vkCmdTraceRaysIndirectKHR)(VkCommandBuffer commandBuffer, const VkStridedBufferRegionKHR* pRaygenShaderBindingTable, const VkStridedBufferRegionKHR* pMissShaderBindingTable, const VkStridedBufferRegionKHR* pHitShaderBindingTable, const VkStridedBufferRegionKHR* pCallableShaderBindingTable, VkBuffer buffer, VkDeviceSize offset);
+typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceAccelerationStructureCompatibilityKHR)(VkDevice device, const VkAccelerationStructureVersionKHR* version);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateAccelerationStructureKHR(
+ VkDevice device,
+ const VkAccelerationStructureCreateInfoKHR* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator,
+ VkAccelerationStructureKHR* pAccelerationStructure);
+
+VKAPI_ATTR void VKAPI_CALL vkGetAccelerationStructureMemoryRequirementsKHR(
+ VkDevice device,
+ const VkAccelerationStructureMemoryRequirementsInfoKHR* pInfo,
+ VkMemoryRequirements2* pMemoryRequirements);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBuildAccelerationStructureKHR(
+ VkCommandBuffer commandBuffer,
+ uint32_t infoCount,
+ const VkAccelerationStructureBuildGeometryInfoKHR* pInfos,
+ const VkAccelerationStructureBuildOffsetInfoKHR* const* ppOffsetInfos);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBuildAccelerationStructureIndirectKHR(
+ VkCommandBuffer commandBuffer,
+ const VkAccelerationStructureBuildGeometryInfoKHR* pInfo,
+ VkBuffer indirectBuffer,
+ VkDeviceSize indirectOffset,
+ uint32_t indirectStride);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkBuildAccelerationStructureKHR(
+ VkDevice device,
+ uint32_t infoCount,
+ const VkAccelerationStructureBuildGeometryInfoKHR* pInfos,
+ const VkAccelerationStructureBuildOffsetInfoKHR* const* ppOffsetInfos);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCopyAccelerationStructureKHR(
+ VkDevice device,
+ const VkCopyAccelerationStructureInfoKHR* pInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCopyAccelerationStructureToMemoryKHR(
+ VkDevice device,
+ const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCopyMemoryToAccelerationStructureKHR(
+ VkDevice device,
+ const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkWriteAccelerationStructuresPropertiesKHR(
+ VkDevice device,
+ uint32_t accelerationStructureCount,
+ const VkAccelerationStructureKHR* pAccelerationStructures,
+ VkQueryType queryType,
+ size_t dataSize,
+ void* pData,
+ size_t stride);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyAccelerationStructureKHR(
+ VkCommandBuffer commandBuffer,
+ const VkCopyAccelerationStructureInfoKHR* pInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyAccelerationStructureToMemoryKHR(
+ VkCommandBuffer commandBuffer,
+ const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyMemoryToAccelerationStructureKHR(
+ VkCommandBuffer commandBuffer,
+ const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdTraceRaysKHR(
+ VkCommandBuffer commandBuffer,
+ const VkStridedBufferRegionKHR* pRaygenShaderBindingTable,
+ const VkStridedBufferRegionKHR* pMissShaderBindingTable,
+ const VkStridedBufferRegionKHR* pHitShaderBindingTable,
+ const VkStridedBufferRegionKHR* pCallableShaderBindingTable,
+ uint32_t width,
+ uint32_t height,
+ uint32_t depth);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateRayTracingPipelinesKHR(
+ VkDevice device,
+ VkPipelineCache pipelineCache,
+ uint32_t createInfoCount,
+ const VkRayTracingPipelineCreateInfoKHR* pCreateInfos,
+ const VkAllocationCallbacks* pAllocator,
+ VkPipeline* pPipelines);
+
+VKAPI_ATTR VkDeviceAddress VKAPI_CALL vkGetAccelerationStructureDeviceAddressKHR(
+ VkDevice device,
+ const VkAccelerationStructureDeviceAddressInfoKHR* pInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetRayTracingCaptureReplayShaderGroupHandlesKHR(
+ VkDevice device,
+ VkPipeline pipeline,
+ uint32_t firstGroup,
+ uint32_t groupCount,
+ size_t dataSize,
+ void* pData);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdTraceRaysIndirectKHR(
+ VkCommandBuffer commandBuffer,
+ const VkStridedBufferRegionKHR* pRaygenShaderBindingTable,
+ const VkStridedBufferRegionKHR* pMissShaderBindingTable,
+ const VkStridedBufferRegionKHR* pHitShaderBindingTable,
+ const VkStridedBufferRegionKHR* pCallableShaderBindingTable,
+ VkBuffer buffer,
+ VkDeviceSize offset);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceAccelerationStructureCompatibilityKHR(
+ VkDevice device,
+ const VkAccelerationStructureVersionKHR* version);
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
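
The feature and property structs in this provisional header are meant to be chained into the VkPhysicalDeviceFeatures2 / VkPhysicalDeviceProperties2 queries from core Vulkan 1.1. A hedged sketch of probing the ray tracing features, assuming 'physicalDevice' is a valid handle and VK_ENABLE_BETA_EXTENSIONS is defined so this header is pulled in:

VkPhysicalDeviceRayTracingFeaturesKHR rtFeatures = {};
rtFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_FEATURES_KHR;

VkPhysicalDeviceFeatures2 features2 = {};
features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
features2.pNext = &rtFeatures;               // chain the beta struct into the query

vkGetPhysicalDeviceFeatures2(physicalDevice, &features2);

if (rtFeatures.rayTracing) {
    // VK_KHR_ray_tracing can be requested at device creation on this GPU.
}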
diff --git a/thirdparty/vulkan/include/vulkan/vulkan_core.h b/thirdparty/vulkan/include/vulkan/vulkan_core.h
index ea96fc43ed..ac904bca21 100644
--- a/thirdparty/vulkan/include/vulkan/vulkan_core.h
+++ b/thirdparty/vulkan/include/vulkan/vulkan_core.h
@@ -2,19 +2,9 @@
#define VULKAN_CORE_H_ 1
/*
-** Copyright (c) 2015-2019 The Khronos Group Inc.
+** Copyright (c) 2015-2020 The Khronos Group Inc.
**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-** http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
+** SPDX-License-Identifier: Apache-2.0
*/
/*
@@ -31,8 +21,20 @@ extern "C" {
#define VK_VERSION_1_0 1
#include "vk_platform.h"
+
+#define VK_DEFINE_HANDLE(object) typedef struct object##_T* object;
+
+
+#if !defined(VK_DEFINE_NON_DISPATCHABLE_HANDLE)
+#if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
+ #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef struct object##_T *object;
+#else
+ #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef uint64_t object;
+#endif
+#endif
+
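// Illustration only: what the handle macros moved up here expand to.
// Dispatchable handles are always opaque pointer types:
//     VK_DEFINE_HANDLE(VkInstance)  ->  typedef struct VkInstance_T* VkInstance;
// Non-dispatchable handles are pointers on 64-bit targets and fall back to a
// plain 64-bit integer elsewhere:
//     VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBuffer)
//         ->  typedef struct VkBuffer_T* VkBuffer;   // 64-bit targets
//         ->  typedef uint64_t VkBuffer;             // 32-bit targets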
#define VK_MAKE_VERSION(major, minor, patch) \
- (((major) << 22) | ((minor) << 12) | (patch))
+ ((((uint32_t)(major)) << 22) | (((uint32_t)(minor)) << 12) | ((uint32_t)(patch)))
// DEPRECATED: This define has been removed. Specific version defines (e.g. VK_API_VERSION_1_0), or the VK_MAKE_VERSION macro, should be used instead.
//#define VK_API_VERSION VK_MAKE_VERSION(1, 0, 0) // Patch version should always be set to 0
@@ -40,31 +42,25 @@ extern "C" {
// Vulkan 1.0 version number
#define VK_API_VERSION_1_0 VK_MAKE_VERSION(1, 0, 0)// Patch version should always be set to 0
+// Version of this file
+#define VK_HEADER_VERSION 154
+
+// Complete version of this file
+#define VK_HEADER_VERSION_COMPLETE VK_MAKE_VERSION(1, 2, VK_HEADER_VERSION)
+
#define VK_VERSION_MAJOR(version) ((uint32_t)(version) >> 22)
#define VK_VERSION_MINOR(version) (((uint32_t)(version) >> 12) & 0x3ff)
#define VK_VERSION_PATCH(version) ((uint32_t)(version) & 0xfff)
-// Version of this file
-#define VK_HEADER_VERSION 131
-
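
The reworked VK_MAKE_VERSION now casts each field to uint32_t before shifting, so narrower or signed arguments cannot provoke an implementation-defined shift; major occupies bits 22-31, minor bits 12-21, and patch bits 0-11. A small worked example using this header's own numbers (the static_asserts are illustrative only, not part of the header):

// VK_HEADER_VERSION_COMPLETE == VK_MAKE_VERSION(1, 2, 154)
//                            == (1u << 22) | (2u << 12) | 154u == 4202650
static_assert(VK_MAKE_VERSION(1, 2, 154) == 4202650u, "packing example");
static_assert(VK_VERSION_MAJOR(4202650u) == 1 &&
              VK_VERSION_MINOR(4202650u) == 2 &&
              VK_VERSION_PATCH(4202650u) == 154, "unpacking example");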
#define VK_NULL_HANDLE 0
-
-#define VK_DEFINE_HANDLE(object) typedef struct object##_T* object;
-
-
-#if !defined(VK_DEFINE_NON_DISPATCHABLE_HANDLE)
-#if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
- #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef struct object##_T *object;
-#else
- #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef uint64_t object;
-#endif
-#endif
-
-typedef uint32_t VkFlags;
typedef uint32_t VkBool32;
+typedef uint64_t VkDeviceAddress;
typedef uint64_t VkDeviceSize;
+typedef uint32_t VkFlags;
typedef uint32_t VkSampleMask;
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBuffer)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkImage)
VK_DEFINE_HANDLE(VkInstance)
VK_DEFINE_HANDLE(VkPhysicalDevice)
VK_DEFINE_HANDLE(VkDevice)
@@ -73,8 +69,6 @@ VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSemaphore)
VK_DEFINE_HANDLE(VkCommandBuffer)
VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkFence)
VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDeviceMemory)
-VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBuffer)
-VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkImage)
VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkEvent)
VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkQueryPool)
VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBufferView)
@@ -82,38 +76,30 @@ VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkImageView)
VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkShaderModule)
VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipelineCache)
VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipelineLayout)
-VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkRenderPass)
VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipeline)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkRenderPass)
VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorSetLayout)
VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSampler)
-VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorPool)
VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorSet)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorPool)
VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkFramebuffer)
VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCommandPool)
-#define VK_LOD_CLAMP_NONE 1000.0f
-#define VK_REMAINING_MIP_LEVELS (~0U)
-#define VK_REMAINING_ARRAY_LAYERS (~0U)
-#define VK_WHOLE_SIZE (~0ULL)
#define VK_ATTACHMENT_UNUSED (~0U)
-#define VK_TRUE 1
#define VK_FALSE 0
+#define VK_LOD_CLAMP_NONE 1000.0f
#define VK_QUEUE_FAMILY_IGNORED (~0U)
+#define VK_REMAINING_ARRAY_LAYERS (~0U)
+#define VK_REMAINING_MIP_LEVELS (~0U)
#define VK_SUBPASS_EXTERNAL (~0U)
-#define VK_MAX_PHYSICAL_DEVICE_NAME_SIZE 256
-#define VK_UUID_SIZE 16
+#define VK_TRUE 1
+#define VK_WHOLE_SIZE (~0ULL)
#define VK_MAX_MEMORY_TYPES 32
#define VK_MAX_MEMORY_HEAPS 16
+#define VK_MAX_PHYSICAL_DEVICE_NAME_SIZE 256
+#define VK_UUID_SIZE 16
#define VK_MAX_EXTENSION_NAME_SIZE 256
#define VK_MAX_DESCRIPTION_SIZE 256
-typedef enum VkPipelineCacheHeaderVersion {
- VK_PIPELINE_CACHE_HEADER_VERSION_ONE = 1,
- VK_PIPELINE_CACHE_HEADER_VERSION_BEGIN_RANGE = VK_PIPELINE_CACHE_HEADER_VERSION_ONE,
- VK_PIPELINE_CACHE_HEADER_VERSION_END_RANGE = VK_PIPELINE_CACHE_HEADER_VERSION_ONE,
- VK_PIPELINE_CACHE_HEADER_VERSION_RANGE_SIZE = (VK_PIPELINE_CACHE_HEADER_VERSION_ONE - VK_PIPELINE_CACHE_HEADER_VERSION_ONE + 1),
- VK_PIPELINE_CACHE_HEADER_VERSION_MAX_ENUM = 0x7FFFFFFF
-} VkPipelineCacheHeaderVersion;
-
typedef enum VkResult {
VK_SUCCESS = 0,
VK_NOT_READY = 1,
@@ -145,17 +131,21 @@ typedef enum VkResult {
VK_ERROR_INCOMPATIBLE_DISPLAY_KHR = -1000003001,
VK_ERROR_VALIDATION_FAILED_EXT = -1000011001,
VK_ERROR_INVALID_SHADER_NV = -1000012000,
+ VK_ERROR_INCOMPATIBLE_VERSION_KHR = -1000150000,
VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT = -1000158000,
VK_ERROR_NOT_PERMITTED_EXT = -1000174001,
VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT = -1000255000,
+ VK_THREAD_IDLE_KHR = 1000268000,
+ VK_THREAD_DONE_KHR = 1000268001,
+ VK_OPERATION_DEFERRED_KHR = 1000268002,
+ VK_OPERATION_NOT_DEFERRED_KHR = 1000268003,
+ VK_PIPELINE_COMPILE_REQUIRED_EXT = 1000297000,
VK_ERROR_OUT_OF_POOL_MEMORY_KHR = VK_ERROR_OUT_OF_POOL_MEMORY,
VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR = VK_ERROR_INVALID_EXTERNAL_HANDLE,
VK_ERROR_FRAGMENTATION_EXT = VK_ERROR_FRAGMENTATION,
VK_ERROR_INVALID_DEVICE_ADDRESS_EXT = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS,
VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS_KHR = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS,
- VK_RESULT_BEGIN_RANGE = VK_ERROR_UNKNOWN,
- VK_RESULT_END_RANGE = VK_INCOMPLETE,
- VK_RESULT_RANGE_SIZE = (VK_INCOMPLETE - VK_ERROR_UNKNOWN + 1),
+ VK_ERROR_PIPELINE_COMPILE_REQUIRED_EXT = VK_PIPELINE_COMPILE_REQUIRED_EXT,
VK_RESULT_MAX_ENUM = 0x7FFFFFFF
} VkResult;
@@ -352,6 +342,7 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT = 1000028001,
VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT = 1000028002,
VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX = 1000030000,
+ VK_STRUCTURE_TYPE_IMAGE_VIEW_ADDRESS_PROPERTIES_NVX = 1000030001,
VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD = 1000041000,
VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP = 1000049000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV = 1000050000,
@@ -384,12 +375,6 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT = 1000081001,
VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT = 1000081002,
VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR = 1000084000,
- VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX = 1000086000,
- VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX = 1000086001,
- VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX = 1000086002,
- VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX = 1000086003,
- VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX = 1000086004,
- VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX = 1000086005,
VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV = 1000087000,
VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT = 1000090000,
VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT = 1000091000,
@@ -453,6 +438,26 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT = 1000148001,
VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT = 1000148002,
VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV = 1000149000,
+ VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_KHR = 1000165006,
+ VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR = 1000165007,
+ VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_GEOMETRY_INFO_KHR = 1000150000,
+ VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_GEOMETRY_TYPE_INFO_KHR = 1000150001,
+ VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_DEVICE_ADDRESS_INFO_KHR = 1000150002,
+ VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_AABBS_DATA_KHR = 1000150003,
+ VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_INSTANCES_DATA_KHR = 1000150004,
+ VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_TRIANGLES_DATA_KHR = 1000150005,
+ VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_KHR = 1000150006,
+ VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_KHR = 1000150008,
+ VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_VERSION_KHR = 1000150009,
+ VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_INFO_KHR = 1000150010,
+ VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_TO_MEMORY_INFO_KHR = 1000150011,
+ VK_STRUCTURE_TYPE_COPY_MEMORY_TO_ACCELERATION_STRUCTURE_INFO_KHR = 1000150012,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_FEATURES_KHR = 1000150013,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_KHR = 1000150014,
+ VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_KHR = 1000150015,
+ VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR = 1000150016,
+ VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_KHR = 1000150017,
+ VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_INTERFACE_CREATE_INFO_KHR = 1000150018,
VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV = 1000152000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV = 1000154000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV = 1000154001,
@@ -464,6 +469,8 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT = 1000158005,
VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT = 1000160000,
VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT = 1000160001,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_FEATURES_KHR = 1000163000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_PROPERTIES_KHR = 1000163001,
VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV = 1000164000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV = 1000164001,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV = 1000164002,
@@ -473,8 +480,6 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_GEOMETRY_NV = 1000165003,
VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV = 1000165004,
VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV = 1000165005,
- VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV = 1000165006,
- VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV = 1000165007,
VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV = 1000165008,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV = 1000165009,
VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV = 1000165011,
@@ -507,7 +512,7 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV = 1000206000,
VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV = 1000206001,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL = 1000209000,
- VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL = 1000210000,
+ VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL = 1000210000,
VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL = 1000210001,
VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL = 1000210002,
VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL = 1000210003,
@@ -550,7 +555,10 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT = 1000259000,
VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT = 1000259001,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT = 1000259002,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT = 1000260000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT = 1000265000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT = 1000267000,
+ VK_STRUCTURE_TYPE_DEFERRED_OPERATION_INFO_KHR = 1000268000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR = 1000269000,
VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR = 1000269001,
VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR = 1000269002,
@@ -558,8 +566,46 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR = 1000269004,
VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR = 1000269005,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT = 1000276000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_NV = 1000277000,
+ VK_STRUCTURE_TYPE_GRAPHICS_SHADER_GROUP_CREATE_INFO_NV = 1000277001,
+ VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_SHADER_GROUPS_CREATE_INFO_NV = 1000277002,
+ VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_TOKEN_NV = 1000277003,
+ VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NV = 1000277004,
+ VK_STRUCTURE_TYPE_GENERATED_COMMANDS_INFO_NV = 1000277005,
+ VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_NV = 1000277006,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_NV = 1000277007,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT = 1000281000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT = 1000281001,
+ VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDER_PASS_TRANSFORM_INFO_QCOM = 1000282000,
+ VK_STRUCTURE_TYPE_RENDER_PASS_TRANSFORM_BEGIN_INFO_QCOM = 1000282001,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT = 1000286000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_PROPERTIES_EXT = 1000286001,
+ VK_STRUCTURE_TYPE_SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT = 1000287000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT = 1000287001,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT = 1000287002,
+ VK_STRUCTURE_TYPE_PIPELINE_LIBRARY_CREATE_INFO_KHR = 1000290000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES_EXT = 1000295000,
+ VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO_EXT = 1000295001,
+ VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO_EXT = 1000295002,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES_EXT = 1000297000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV = 1000300000,
+ VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV = 1000300001,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_FEATURES_EXT = 1000332000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_PROPERTIES_EXT = 1000332001,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES_EXT = 1000335000,
+ VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2_KHR = 1000337000,
+ VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2_KHR = 1000337001,
+ VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2_KHR = 1000337002,
+ VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2_KHR = 1000337003,
+ VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2_KHR = 1000337004,
+ VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2_KHR = 1000337005,
+ VK_STRUCTURE_TYPE_BUFFER_COPY_2_KHR = 1000337006,
+ VK_STRUCTURE_TYPE_IMAGE_COPY_2_KHR = 1000337007,
+ VK_STRUCTURE_TYPE_IMAGE_BLIT_2_KHR = 1000337008,
+ VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2_KHR = 1000337009,
+ VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2_KHR = 1000337010,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT = 1000340000,
+ VK_STRUCTURE_TYPE_DIRECTFB_SURFACE_CREATE_INFO_EXT = 1000346000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES,
VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT,
@@ -618,8 +664,8 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO,
VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO,
VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO,
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES,
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR,
VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS,
VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES,
@@ -643,6 +689,8 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES,
VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO,
VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT,
+ VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV = VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_KHR,
+ VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES,
VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT_KHR = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES,
@@ -658,6 +706,7 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO,
VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO,
VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO,
+ VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES,
@@ -673,29 +722,110 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO,
VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES,
- VK_STRUCTURE_TYPE_BEGIN_RANGE = VK_STRUCTURE_TYPE_APPLICATION_INFO,
- VK_STRUCTURE_TYPE_END_RANGE = VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO,
- VK_STRUCTURE_TYPE_RANGE_SIZE = (VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO - VK_STRUCTURE_TYPE_APPLICATION_INFO + 1),
VK_STRUCTURE_TYPE_MAX_ENUM = 0x7FFFFFFF
} VkStructureType;
+typedef enum VkImageLayout {
+ VK_IMAGE_LAYOUT_UNDEFINED = 0,
+ VK_IMAGE_LAYOUT_GENERAL = 1,
+ VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL = 2,
+ VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL = 3,
+ VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL = 4,
+ VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL = 5,
+ VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL = 6,
+ VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL = 7,
+ VK_IMAGE_LAYOUT_PREINITIALIZED = 8,
+ VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL = 1000117000,
+ VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL = 1000117001,
+ VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL = 1000241000,
+ VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL = 1000241001,
+ VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL = 1000241002,
+ VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL = 1000241003,
+ VK_IMAGE_LAYOUT_PRESENT_SRC_KHR = 1000001002,
+ VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR = 1000111000,
+ VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV = 1000164003,
+ VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT = 1000218000,
+ VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL,
+ VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL,
+ VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL,
+ VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL,
+ VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL,
+ VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL,
+ VK_IMAGE_LAYOUT_MAX_ENUM = 0x7FFFFFFF
+} VkImageLayout;
+
+typedef enum VkObjectType {
+ VK_OBJECT_TYPE_UNKNOWN = 0,
+ VK_OBJECT_TYPE_INSTANCE = 1,
+ VK_OBJECT_TYPE_PHYSICAL_DEVICE = 2,
+ VK_OBJECT_TYPE_DEVICE = 3,
+ VK_OBJECT_TYPE_QUEUE = 4,
+ VK_OBJECT_TYPE_SEMAPHORE = 5,
+ VK_OBJECT_TYPE_COMMAND_BUFFER = 6,
+ VK_OBJECT_TYPE_FENCE = 7,
+ VK_OBJECT_TYPE_DEVICE_MEMORY = 8,
+ VK_OBJECT_TYPE_BUFFER = 9,
+ VK_OBJECT_TYPE_IMAGE = 10,
+ VK_OBJECT_TYPE_EVENT = 11,
+ VK_OBJECT_TYPE_QUERY_POOL = 12,
+ VK_OBJECT_TYPE_BUFFER_VIEW = 13,
+ VK_OBJECT_TYPE_IMAGE_VIEW = 14,
+ VK_OBJECT_TYPE_SHADER_MODULE = 15,
+ VK_OBJECT_TYPE_PIPELINE_CACHE = 16,
+ VK_OBJECT_TYPE_PIPELINE_LAYOUT = 17,
+ VK_OBJECT_TYPE_RENDER_PASS = 18,
+ VK_OBJECT_TYPE_PIPELINE = 19,
+ VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT = 20,
+ VK_OBJECT_TYPE_SAMPLER = 21,
+ VK_OBJECT_TYPE_DESCRIPTOR_POOL = 22,
+ VK_OBJECT_TYPE_DESCRIPTOR_SET = 23,
+ VK_OBJECT_TYPE_FRAMEBUFFER = 24,
+ VK_OBJECT_TYPE_COMMAND_POOL = 25,
+ VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION = 1000156000,
+ VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE = 1000085000,
+ VK_OBJECT_TYPE_SURFACE_KHR = 1000000000,
+ VK_OBJECT_TYPE_SWAPCHAIN_KHR = 1000001000,
+ VK_OBJECT_TYPE_DISPLAY_KHR = 1000002000,
+ VK_OBJECT_TYPE_DISPLAY_MODE_KHR = 1000002001,
+ VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT = 1000011000,
+ VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT = 1000128000,
+ VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR = 1000165000,
+ VK_OBJECT_TYPE_VALIDATION_CACHE_EXT = 1000160000,
+ VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL = 1000210000,
+ VK_OBJECT_TYPE_DEFERRED_OPERATION_KHR = 1000268000,
+ VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NV = 1000277000,
+ VK_OBJECT_TYPE_PRIVATE_DATA_SLOT_EXT = 1000295000,
+ VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE,
+ VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION,
+ VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV = VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR,
+ VK_OBJECT_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkObjectType;
+
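VkObjectType, now defined up front, is what the debug-utils object-naming path keys on. A hedged sketch, assuming VK_EXT_debug_utils is enabled, 'device' and 'image' are valid handles, and the hypothetical 'pfnSetDebugUtilsObjectNameEXT' pointer was fetched with vkGetDeviceProcAddr:

VkDebugUtilsObjectNameInfoEXT nameInfo = {};
nameInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT;
nameInfo.objectType = VK_OBJECT_TYPE_IMAGE;        // from the enum above
nameInfo.objectHandle = (uint64_t)image;           // raw handle value
nameInfo.pObjectName = "shadow map";
pfnSetDebugUtilsObjectNameEXT(device, &nameInfo);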
+typedef enum VkVendorId {
+ VK_VENDOR_ID_VIV = 0x10001,
+ VK_VENDOR_ID_VSI = 0x10002,
+ VK_VENDOR_ID_KAZAN = 0x10003,
+ VK_VENDOR_ID_CODEPLAY = 0x10004,
+ VK_VENDOR_ID_MESA = 0x10005,
+ VK_VENDOR_ID_MAX_ENUM = 0x7FFFFFFF
+} VkVendorId;
+
+typedef enum VkPipelineCacheHeaderVersion {
+ VK_PIPELINE_CACHE_HEADER_VERSION_ONE = 1,
+ VK_PIPELINE_CACHE_HEADER_VERSION_MAX_ENUM = 0x7FFFFFFF
+} VkPipelineCacheHeaderVersion;
+
typedef enum VkSystemAllocationScope {
VK_SYSTEM_ALLOCATION_SCOPE_COMMAND = 0,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT = 1,
VK_SYSTEM_ALLOCATION_SCOPE_CACHE = 2,
VK_SYSTEM_ALLOCATION_SCOPE_DEVICE = 3,
VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE = 4,
- VK_SYSTEM_ALLOCATION_SCOPE_BEGIN_RANGE = VK_SYSTEM_ALLOCATION_SCOPE_COMMAND,
- VK_SYSTEM_ALLOCATION_SCOPE_END_RANGE = VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE,
- VK_SYSTEM_ALLOCATION_SCOPE_RANGE_SIZE = (VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE - VK_SYSTEM_ALLOCATION_SCOPE_COMMAND + 1),
VK_SYSTEM_ALLOCATION_SCOPE_MAX_ENUM = 0x7FFFFFFF
} VkSystemAllocationScope;
typedef enum VkInternalAllocationType {
VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE = 0,
- VK_INTERNAL_ALLOCATION_TYPE_BEGIN_RANGE = VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE,
- VK_INTERNAL_ALLOCATION_TYPE_END_RANGE = VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE,
- VK_INTERNAL_ALLOCATION_TYPE_RANGE_SIZE = (VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE - VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE + 1),
VK_INTERNAL_ALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
} VkInternalAllocationType;
@@ -941,6 +1071,8 @@ typedef enum VkFormat {
VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK_EXT = 1000066011,
VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK_EXT = 1000066012,
VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK_EXT = 1000066013,
+ VK_FORMAT_A4R4G4B4_UNORM_PACK16_EXT = 1000340000,
+ VK_FORMAT_A4B4G4R4_UNORM_PACK16_EXT = 1000340001,
VK_FORMAT_G8B8G8R8_422_UNORM_KHR = VK_FORMAT_G8B8G8R8_422_UNORM,
VK_FORMAT_B8G8R8G8_422_UNORM_KHR = VK_FORMAT_B8G8R8G8_422_UNORM,
VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM,
@@ -975,41 +1107,29 @@ typedef enum VkFormat {
VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM_KHR = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM,
VK_FORMAT_G16_B16R16_2PLANE_422_UNORM_KHR = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM,
VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM_KHR = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM,
- VK_FORMAT_BEGIN_RANGE = VK_FORMAT_UNDEFINED,
- VK_FORMAT_END_RANGE = VK_FORMAT_ASTC_12x12_SRGB_BLOCK,
- VK_FORMAT_RANGE_SIZE = (VK_FORMAT_ASTC_12x12_SRGB_BLOCK - VK_FORMAT_UNDEFINED + 1),
VK_FORMAT_MAX_ENUM = 0x7FFFFFFF
} VkFormat;
-typedef enum VkImageType {
- VK_IMAGE_TYPE_1D = 0,
- VK_IMAGE_TYPE_2D = 1,
- VK_IMAGE_TYPE_3D = 2,
- VK_IMAGE_TYPE_BEGIN_RANGE = VK_IMAGE_TYPE_1D,
- VK_IMAGE_TYPE_END_RANGE = VK_IMAGE_TYPE_3D,
- VK_IMAGE_TYPE_RANGE_SIZE = (VK_IMAGE_TYPE_3D - VK_IMAGE_TYPE_1D + 1),
- VK_IMAGE_TYPE_MAX_ENUM = 0x7FFFFFFF
-} VkImageType;
-
typedef enum VkImageTiling {
VK_IMAGE_TILING_OPTIMAL = 0,
VK_IMAGE_TILING_LINEAR = 1,
VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT = 1000158000,
- VK_IMAGE_TILING_BEGIN_RANGE = VK_IMAGE_TILING_OPTIMAL,
- VK_IMAGE_TILING_END_RANGE = VK_IMAGE_TILING_LINEAR,
- VK_IMAGE_TILING_RANGE_SIZE = (VK_IMAGE_TILING_LINEAR - VK_IMAGE_TILING_OPTIMAL + 1),
VK_IMAGE_TILING_MAX_ENUM = 0x7FFFFFFF
} VkImageTiling;
+typedef enum VkImageType {
+ VK_IMAGE_TYPE_1D = 0,
+ VK_IMAGE_TYPE_2D = 1,
+ VK_IMAGE_TYPE_3D = 2,
+ VK_IMAGE_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkImageType;
+
typedef enum VkPhysicalDeviceType {
VK_PHYSICAL_DEVICE_TYPE_OTHER = 0,
VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU = 1,
VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU = 2,
VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU = 3,
VK_PHYSICAL_DEVICE_TYPE_CPU = 4,
- VK_PHYSICAL_DEVICE_TYPE_BEGIN_RANGE = VK_PHYSICAL_DEVICE_TYPE_OTHER,
- VK_PHYSICAL_DEVICE_TYPE_END_RANGE = VK_PHYSICAL_DEVICE_TYPE_CPU,
- VK_PHYSICAL_DEVICE_TYPE_RANGE_SIZE = (VK_PHYSICAL_DEVICE_TYPE_CPU - VK_PHYSICAL_DEVICE_TYPE_OTHER + 1),
VK_PHYSICAL_DEVICE_TYPE_MAX_ENUM = 0x7FFFFFFF
} VkPhysicalDeviceType;
@@ -1019,69 +1139,19 @@ typedef enum VkQueryType {
VK_QUERY_TYPE_TIMESTAMP = 2,
VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT = 1000028004,
VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR = 1000116000,
- VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV = 1000165000,
+ VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR = 1000165000,
+ VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR = 1000150000,
VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL = 1000210000,
- VK_QUERY_TYPE_BEGIN_RANGE = VK_QUERY_TYPE_OCCLUSION,
- VK_QUERY_TYPE_END_RANGE = VK_QUERY_TYPE_TIMESTAMP,
- VK_QUERY_TYPE_RANGE_SIZE = (VK_QUERY_TYPE_TIMESTAMP - VK_QUERY_TYPE_OCCLUSION + 1),
+ VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR,
VK_QUERY_TYPE_MAX_ENUM = 0x7FFFFFFF
} VkQueryType;
typedef enum VkSharingMode {
VK_SHARING_MODE_EXCLUSIVE = 0,
VK_SHARING_MODE_CONCURRENT = 1,
- VK_SHARING_MODE_BEGIN_RANGE = VK_SHARING_MODE_EXCLUSIVE,
- VK_SHARING_MODE_END_RANGE = VK_SHARING_MODE_CONCURRENT,
- VK_SHARING_MODE_RANGE_SIZE = (VK_SHARING_MODE_CONCURRENT - VK_SHARING_MODE_EXCLUSIVE + 1),
VK_SHARING_MODE_MAX_ENUM = 0x7FFFFFFF
} VkSharingMode;
-typedef enum VkImageLayout {
- VK_IMAGE_LAYOUT_UNDEFINED = 0,
- VK_IMAGE_LAYOUT_GENERAL = 1,
- VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL = 2,
- VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL = 3,
- VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL = 4,
- VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL = 5,
- VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL = 6,
- VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL = 7,
- VK_IMAGE_LAYOUT_PREINITIALIZED = 8,
- VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL = 1000117000,
- VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL = 1000117001,
- VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL = 1000241000,
- VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL = 1000241001,
- VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL = 1000241002,
- VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL = 1000241003,
- VK_IMAGE_LAYOUT_PRESENT_SRC_KHR = 1000001002,
- VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR = 1000111000,
- VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV = 1000164003,
- VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT = 1000218000,
- VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL,
- VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL,
- VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL,
- VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL,
- VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL,
- VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL,
- VK_IMAGE_LAYOUT_BEGIN_RANGE = VK_IMAGE_LAYOUT_UNDEFINED,
- VK_IMAGE_LAYOUT_END_RANGE = VK_IMAGE_LAYOUT_PREINITIALIZED,
- VK_IMAGE_LAYOUT_RANGE_SIZE = (VK_IMAGE_LAYOUT_PREINITIALIZED - VK_IMAGE_LAYOUT_UNDEFINED + 1),
- VK_IMAGE_LAYOUT_MAX_ENUM = 0x7FFFFFFF
-} VkImageLayout;
-
-typedef enum VkImageViewType {
- VK_IMAGE_VIEW_TYPE_1D = 0,
- VK_IMAGE_VIEW_TYPE_2D = 1,
- VK_IMAGE_VIEW_TYPE_3D = 2,
- VK_IMAGE_VIEW_TYPE_CUBE = 3,
- VK_IMAGE_VIEW_TYPE_1D_ARRAY = 4,
- VK_IMAGE_VIEW_TYPE_2D_ARRAY = 5,
- VK_IMAGE_VIEW_TYPE_CUBE_ARRAY = 6,
- VK_IMAGE_VIEW_TYPE_BEGIN_RANGE = VK_IMAGE_VIEW_TYPE_1D,
- VK_IMAGE_VIEW_TYPE_END_RANGE = VK_IMAGE_VIEW_TYPE_CUBE_ARRAY,
- VK_IMAGE_VIEW_TYPE_RANGE_SIZE = (VK_IMAGE_VIEW_TYPE_CUBE_ARRAY - VK_IMAGE_VIEW_TYPE_1D + 1),
- VK_IMAGE_VIEW_TYPE_MAX_ENUM = 0x7FFFFFFF
-} VkImageViewType;
-
typedef enum VkComponentSwizzle {
VK_COMPONENT_SWIZZLE_IDENTITY = 0,
VK_COMPONENT_SWIZZLE_ZERO = 1,
@@ -1090,111 +1160,19 @@ typedef enum VkComponentSwizzle {
VK_COMPONENT_SWIZZLE_G = 4,
VK_COMPONENT_SWIZZLE_B = 5,
VK_COMPONENT_SWIZZLE_A = 6,
- VK_COMPONENT_SWIZZLE_BEGIN_RANGE = VK_COMPONENT_SWIZZLE_IDENTITY,
- VK_COMPONENT_SWIZZLE_END_RANGE = VK_COMPONENT_SWIZZLE_A,
- VK_COMPONENT_SWIZZLE_RANGE_SIZE = (VK_COMPONENT_SWIZZLE_A - VK_COMPONENT_SWIZZLE_IDENTITY + 1),
VK_COMPONENT_SWIZZLE_MAX_ENUM = 0x7FFFFFFF
} VkComponentSwizzle;
-typedef enum VkVertexInputRate {
- VK_VERTEX_INPUT_RATE_VERTEX = 0,
- VK_VERTEX_INPUT_RATE_INSTANCE = 1,
- VK_VERTEX_INPUT_RATE_BEGIN_RANGE = VK_VERTEX_INPUT_RATE_VERTEX,
- VK_VERTEX_INPUT_RATE_END_RANGE = VK_VERTEX_INPUT_RATE_INSTANCE,
- VK_VERTEX_INPUT_RATE_RANGE_SIZE = (VK_VERTEX_INPUT_RATE_INSTANCE - VK_VERTEX_INPUT_RATE_VERTEX + 1),
- VK_VERTEX_INPUT_RATE_MAX_ENUM = 0x7FFFFFFF
-} VkVertexInputRate;
-
-typedef enum VkPrimitiveTopology {
- VK_PRIMITIVE_TOPOLOGY_POINT_LIST = 0,
- VK_PRIMITIVE_TOPOLOGY_LINE_LIST = 1,
- VK_PRIMITIVE_TOPOLOGY_LINE_STRIP = 2,
- VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST = 3,
- VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP = 4,
- VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN = 5,
- VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY = 6,
- VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY = 7,
- VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY = 8,
- VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY = 9,
- VK_PRIMITIVE_TOPOLOGY_PATCH_LIST = 10,
- VK_PRIMITIVE_TOPOLOGY_BEGIN_RANGE = VK_PRIMITIVE_TOPOLOGY_POINT_LIST,
- VK_PRIMITIVE_TOPOLOGY_END_RANGE = VK_PRIMITIVE_TOPOLOGY_PATCH_LIST,
- VK_PRIMITIVE_TOPOLOGY_RANGE_SIZE = (VK_PRIMITIVE_TOPOLOGY_PATCH_LIST - VK_PRIMITIVE_TOPOLOGY_POINT_LIST + 1),
- VK_PRIMITIVE_TOPOLOGY_MAX_ENUM = 0x7FFFFFFF
-} VkPrimitiveTopology;
-
-typedef enum VkPolygonMode {
- VK_POLYGON_MODE_FILL = 0,
- VK_POLYGON_MODE_LINE = 1,
- VK_POLYGON_MODE_POINT = 2,
- VK_POLYGON_MODE_FILL_RECTANGLE_NV = 1000153000,
- VK_POLYGON_MODE_BEGIN_RANGE = VK_POLYGON_MODE_FILL,
- VK_POLYGON_MODE_END_RANGE = VK_POLYGON_MODE_POINT,
- VK_POLYGON_MODE_RANGE_SIZE = (VK_POLYGON_MODE_POINT - VK_POLYGON_MODE_FILL + 1),
- VK_POLYGON_MODE_MAX_ENUM = 0x7FFFFFFF
-} VkPolygonMode;
-
-typedef enum VkFrontFace {
- VK_FRONT_FACE_COUNTER_CLOCKWISE = 0,
- VK_FRONT_FACE_CLOCKWISE = 1,
- VK_FRONT_FACE_BEGIN_RANGE = VK_FRONT_FACE_COUNTER_CLOCKWISE,
- VK_FRONT_FACE_END_RANGE = VK_FRONT_FACE_CLOCKWISE,
- VK_FRONT_FACE_RANGE_SIZE = (VK_FRONT_FACE_CLOCKWISE - VK_FRONT_FACE_COUNTER_CLOCKWISE + 1),
- VK_FRONT_FACE_MAX_ENUM = 0x7FFFFFFF
-} VkFrontFace;
-
-typedef enum VkCompareOp {
- VK_COMPARE_OP_NEVER = 0,
- VK_COMPARE_OP_LESS = 1,
- VK_COMPARE_OP_EQUAL = 2,
- VK_COMPARE_OP_LESS_OR_EQUAL = 3,
- VK_COMPARE_OP_GREATER = 4,
- VK_COMPARE_OP_NOT_EQUAL = 5,
- VK_COMPARE_OP_GREATER_OR_EQUAL = 6,
- VK_COMPARE_OP_ALWAYS = 7,
- VK_COMPARE_OP_BEGIN_RANGE = VK_COMPARE_OP_NEVER,
- VK_COMPARE_OP_END_RANGE = VK_COMPARE_OP_ALWAYS,
- VK_COMPARE_OP_RANGE_SIZE = (VK_COMPARE_OP_ALWAYS - VK_COMPARE_OP_NEVER + 1),
- VK_COMPARE_OP_MAX_ENUM = 0x7FFFFFFF
-} VkCompareOp;
-
-typedef enum VkStencilOp {
- VK_STENCIL_OP_KEEP = 0,
- VK_STENCIL_OP_ZERO = 1,
- VK_STENCIL_OP_REPLACE = 2,
- VK_STENCIL_OP_INCREMENT_AND_CLAMP = 3,
- VK_STENCIL_OP_DECREMENT_AND_CLAMP = 4,
- VK_STENCIL_OP_INVERT = 5,
- VK_STENCIL_OP_INCREMENT_AND_WRAP = 6,
- VK_STENCIL_OP_DECREMENT_AND_WRAP = 7,
- VK_STENCIL_OP_BEGIN_RANGE = VK_STENCIL_OP_KEEP,
- VK_STENCIL_OP_END_RANGE = VK_STENCIL_OP_DECREMENT_AND_WRAP,
- VK_STENCIL_OP_RANGE_SIZE = (VK_STENCIL_OP_DECREMENT_AND_WRAP - VK_STENCIL_OP_KEEP + 1),
- VK_STENCIL_OP_MAX_ENUM = 0x7FFFFFFF
-} VkStencilOp;
-
-typedef enum VkLogicOp {
- VK_LOGIC_OP_CLEAR = 0,
- VK_LOGIC_OP_AND = 1,
- VK_LOGIC_OP_AND_REVERSE = 2,
- VK_LOGIC_OP_COPY = 3,
- VK_LOGIC_OP_AND_INVERTED = 4,
- VK_LOGIC_OP_NO_OP = 5,
- VK_LOGIC_OP_XOR = 6,
- VK_LOGIC_OP_OR = 7,
- VK_LOGIC_OP_NOR = 8,
- VK_LOGIC_OP_EQUIVALENT = 9,
- VK_LOGIC_OP_INVERT = 10,
- VK_LOGIC_OP_OR_REVERSE = 11,
- VK_LOGIC_OP_COPY_INVERTED = 12,
- VK_LOGIC_OP_OR_INVERTED = 13,
- VK_LOGIC_OP_NAND = 14,
- VK_LOGIC_OP_SET = 15,
- VK_LOGIC_OP_BEGIN_RANGE = VK_LOGIC_OP_CLEAR,
- VK_LOGIC_OP_END_RANGE = VK_LOGIC_OP_SET,
- VK_LOGIC_OP_RANGE_SIZE = (VK_LOGIC_OP_SET - VK_LOGIC_OP_CLEAR + 1),
- VK_LOGIC_OP_MAX_ENUM = 0x7FFFFFFF
-} VkLogicOp;
+typedef enum VkImageViewType {
+ VK_IMAGE_VIEW_TYPE_1D = 0,
+ VK_IMAGE_VIEW_TYPE_2D = 1,
+ VK_IMAGE_VIEW_TYPE_3D = 2,
+ VK_IMAGE_VIEW_TYPE_CUBE = 3,
+ VK_IMAGE_VIEW_TYPE_1D_ARRAY = 4,
+ VK_IMAGE_VIEW_TYPE_2D_ARRAY = 5,
+ VK_IMAGE_VIEW_TYPE_CUBE_ARRAY = 6,
+ VK_IMAGE_VIEW_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkImageViewType;
typedef enum VkBlendFactor {
VK_BLEND_FACTOR_ZERO = 0,
@@ -1216,9 +1194,6 @@ typedef enum VkBlendFactor {
VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR = 16,
VK_BLEND_FACTOR_SRC1_ALPHA = 17,
VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA = 18,
- VK_BLEND_FACTOR_BEGIN_RANGE = VK_BLEND_FACTOR_ZERO,
- VK_BLEND_FACTOR_END_RANGE = VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA,
- VK_BLEND_FACTOR_RANGE_SIZE = (VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA - VK_BLEND_FACTOR_ZERO + 1),
VK_BLEND_FACTOR_MAX_ENUM = 0x7FFFFFFF
} VkBlendFactor;
@@ -1274,12 +1249,21 @@ typedef enum VkBlendOp {
VK_BLEND_OP_RED_EXT = 1000148043,
VK_BLEND_OP_GREEN_EXT = 1000148044,
VK_BLEND_OP_BLUE_EXT = 1000148045,
- VK_BLEND_OP_BEGIN_RANGE = VK_BLEND_OP_ADD,
- VK_BLEND_OP_END_RANGE = VK_BLEND_OP_MAX,
- VK_BLEND_OP_RANGE_SIZE = (VK_BLEND_OP_MAX - VK_BLEND_OP_ADD + 1),
VK_BLEND_OP_MAX_ENUM = 0x7FFFFFFF
} VkBlendOp;
+typedef enum VkCompareOp {
+ VK_COMPARE_OP_NEVER = 0,
+ VK_COMPARE_OP_LESS = 1,
+ VK_COMPARE_OP_EQUAL = 2,
+ VK_COMPARE_OP_LESS_OR_EQUAL = 3,
+ VK_COMPARE_OP_GREATER = 4,
+ VK_COMPARE_OP_NOT_EQUAL = 5,
+ VK_COMPARE_OP_GREATER_OR_EQUAL = 6,
+ VK_COMPARE_OP_ALWAYS = 7,
+ VK_COMPARE_OP_MAX_ENUM = 0x7FFFFFFF
+} VkCompareOp;
+
typedef enum VkDynamicState {
VK_DYNAMIC_STATE_VIEWPORT = 0,
VK_DYNAMIC_STATE_SCISSOR = 1,
@@ -1297,32 +1281,108 @@ typedef enum VkDynamicState {
VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV = 1000164006,
VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV = 1000205001,
VK_DYNAMIC_STATE_LINE_STIPPLE_EXT = 1000259000,
- VK_DYNAMIC_STATE_BEGIN_RANGE = VK_DYNAMIC_STATE_VIEWPORT,
- VK_DYNAMIC_STATE_END_RANGE = VK_DYNAMIC_STATE_STENCIL_REFERENCE,
- VK_DYNAMIC_STATE_RANGE_SIZE = (VK_DYNAMIC_STATE_STENCIL_REFERENCE - VK_DYNAMIC_STATE_VIEWPORT + 1),
+ VK_DYNAMIC_STATE_CULL_MODE_EXT = 1000267000,
+ VK_DYNAMIC_STATE_FRONT_FACE_EXT = 1000267001,
+ VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT = 1000267002,
+ VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT = 1000267003,
+ VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT = 1000267004,
+ VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT = 1000267005,
+ VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT = 1000267006,
+ VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT = 1000267007,
+ VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT = 1000267008,
+ VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT = 1000267009,
+ VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT = 1000267010,
+ VK_DYNAMIC_STATE_STENCIL_OP_EXT = 1000267011,
VK_DYNAMIC_STATE_MAX_ENUM = 0x7FFFFFFF
} VkDynamicState;
+typedef enum VkFrontFace {
+ VK_FRONT_FACE_COUNTER_CLOCKWISE = 0,
+ VK_FRONT_FACE_CLOCKWISE = 1,
+ VK_FRONT_FACE_MAX_ENUM = 0x7FFFFFFF
+} VkFrontFace;
+
+typedef enum VkVertexInputRate {
+ VK_VERTEX_INPUT_RATE_VERTEX = 0,
+ VK_VERTEX_INPUT_RATE_INSTANCE = 1,
+ VK_VERTEX_INPUT_RATE_MAX_ENUM = 0x7FFFFFFF
+} VkVertexInputRate;
+
+typedef enum VkPrimitiveTopology {
+ VK_PRIMITIVE_TOPOLOGY_POINT_LIST = 0,
+ VK_PRIMITIVE_TOPOLOGY_LINE_LIST = 1,
+ VK_PRIMITIVE_TOPOLOGY_LINE_STRIP = 2,
+ VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST = 3,
+ VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP = 4,
+ VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN = 5,
+ VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY = 6,
+ VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY = 7,
+ VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY = 8,
+ VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY = 9,
+ VK_PRIMITIVE_TOPOLOGY_PATCH_LIST = 10,
+ VK_PRIMITIVE_TOPOLOGY_MAX_ENUM = 0x7FFFFFFF
+} VkPrimitiveTopology;
+
+typedef enum VkPolygonMode {
+ VK_POLYGON_MODE_FILL = 0,
+ VK_POLYGON_MODE_LINE = 1,
+ VK_POLYGON_MODE_POINT = 2,
+ VK_POLYGON_MODE_FILL_RECTANGLE_NV = 1000153000,
+ VK_POLYGON_MODE_MAX_ENUM = 0x7FFFFFFF
+} VkPolygonMode;
+
+typedef enum VkStencilOp {
+ VK_STENCIL_OP_KEEP = 0,
+ VK_STENCIL_OP_ZERO = 1,
+ VK_STENCIL_OP_REPLACE = 2,
+ VK_STENCIL_OP_INCREMENT_AND_CLAMP = 3,
+ VK_STENCIL_OP_DECREMENT_AND_CLAMP = 4,
+ VK_STENCIL_OP_INVERT = 5,
+ VK_STENCIL_OP_INCREMENT_AND_WRAP = 6,
+ VK_STENCIL_OP_DECREMENT_AND_WRAP = 7,
+ VK_STENCIL_OP_MAX_ENUM = 0x7FFFFFFF
+} VkStencilOp;
+
+typedef enum VkLogicOp {
+ VK_LOGIC_OP_CLEAR = 0,
+ VK_LOGIC_OP_AND = 1,
+ VK_LOGIC_OP_AND_REVERSE = 2,
+ VK_LOGIC_OP_COPY = 3,
+ VK_LOGIC_OP_AND_INVERTED = 4,
+ VK_LOGIC_OP_NO_OP = 5,
+ VK_LOGIC_OP_XOR = 6,
+ VK_LOGIC_OP_OR = 7,
+ VK_LOGIC_OP_NOR = 8,
+ VK_LOGIC_OP_EQUIVALENT = 9,
+ VK_LOGIC_OP_INVERT = 10,
+ VK_LOGIC_OP_OR_REVERSE = 11,
+ VK_LOGIC_OP_COPY_INVERTED = 12,
+ VK_LOGIC_OP_OR_INVERTED = 13,
+ VK_LOGIC_OP_NAND = 14,
+ VK_LOGIC_OP_SET = 15,
+ VK_LOGIC_OP_MAX_ENUM = 0x7FFFFFFF
+} VkLogicOp;
+
+typedef enum VkBorderColor {
+ VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK = 0,
+ VK_BORDER_COLOR_INT_TRANSPARENT_BLACK = 1,
+ VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK = 2,
+ VK_BORDER_COLOR_INT_OPAQUE_BLACK = 3,
+ VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE = 4,
+ VK_BORDER_COLOR_INT_OPAQUE_WHITE = 5,
+ VK_BORDER_COLOR_FLOAT_CUSTOM_EXT = 1000287003,
+ VK_BORDER_COLOR_INT_CUSTOM_EXT = 1000287004,
+ VK_BORDER_COLOR_MAX_ENUM = 0x7FFFFFFF
+} VkBorderColor;
+
typedef enum VkFilter {
VK_FILTER_NEAREST = 0,
VK_FILTER_LINEAR = 1,
VK_FILTER_CUBIC_IMG = 1000015000,
VK_FILTER_CUBIC_EXT = VK_FILTER_CUBIC_IMG,
- VK_FILTER_BEGIN_RANGE = VK_FILTER_NEAREST,
- VK_FILTER_END_RANGE = VK_FILTER_LINEAR,
- VK_FILTER_RANGE_SIZE = (VK_FILTER_LINEAR - VK_FILTER_NEAREST + 1),
VK_FILTER_MAX_ENUM = 0x7FFFFFFF
} VkFilter;
-typedef enum VkSamplerMipmapMode {
- VK_SAMPLER_MIPMAP_MODE_NEAREST = 0,
- VK_SAMPLER_MIPMAP_MODE_LINEAR = 1,
- VK_SAMPLER_MIPMAP_MODE_BEGIN_RANGE = VK_SAMPLER_MIPMAP_MODE_NEAREST,
- VK_SAMPLER_MIPMAP_MODE_END_RANGE = VK_SAMPLER_MIPMAP_MODE_LINEAR,
- VK_SAMPLER_MIPMAP_MODE_RANGE_SIZE = (VK_SAMPLER_MIPMAP_MODE_LINEAR - VK_SAMPLER_MIPMAP_MODE_NEAREST + 1),
- VK_SAMPLER_MIPMAP_MODE_MAX_ENUM = 0x7FFFFFFF
-} VkSamplerMipmapMode;
-
typedef enum VkSamplerAddressMode {
VK_SAMPLER_ADDRESS_MODE_REPEAT = 0,
VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT = 1,
@@ -1330,24 +1390,14 @@ typedef enum VkSamplerAddressMode {
VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER = 3,
VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE = 4,
VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE_KHR = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE,
- VK_SAMPLER_ADDRESS_MODE_BEGIN_RANGE = VK_SAMPLER_ADDRESS_MODE_REPEAT,
- VK_SAMPLER_ADDRESS_MODE_END_RANGE = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,
- VK_SAMPLER_ADDRESS_MODE_RANGE_SIZE = (VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER - VK_SAMPLER_ADDRESS_MODE_REPEAT + 1),
VK_SAMPLER_ADDRESS_MODE_MAX_ENUM = 0x7FFFFFFF
} VkSamplerAddressMode;
-typedef enum VkBorderColor {
- VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK = 0,
- VK_BORDER_COLOR_INT_TRANSPARENT_BLACK = 1,
- VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK = 2,
- VK_BORDER_COLOR_INT_OPAQUE_BLACK = 3,
- VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE = 4,
- VK_BORDER_COLOR_INT_OPAQUE_WHITE = 5,
- VK_BORDER_COLOR_BEGIN_RANGE = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,
- VK_BORDER_COLOR_END_RANGE = VK_BORDER_COLOR_INT_OPAQUE_WHITE,
- VK_BORDER_COLOR_RANGE_SIZE = (VK_BORDER_COLOR_INT_OPAQUE_WHITE - VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK + 1),
- VK_BORDER_COLOR_MAX_ENUM = 0x7FFFFFFF
-} VkBorderColor;
+typedef enum VkSamplerMipmapMode {
+ VK_SAMPLER_MIPMAP_MODE_NEAREST = 0,
+ VK_SAMPLER_MIPMAP_MODE_LINEAR = 1,
+ VK_SAMPLER_MIPMAP_MODE_MAX_ENUM = 0x7FFFFFFF
+} VkSamplerMipmapMode;
typedef enum VkDescriptorType {
VK_DESCRIPTOR_TYPE_SAMPLER = 0,
@@ -1362,10 +1412,8 @@ typedef enum VkDescriptorType {
VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC = 9,
VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT = 10,
VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT = 1000138000,
- VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV = 1000165000,
- VK_DESCRIPTOR_TYPE_BEGIN_RANGE = VK_DESCRIPTOR_TYPE_SAMPLER,
- VK_DESCRIPTOR_TYPE_END_RANGE = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT,
- VK_DESCRIPTOR_TYPE_RANGE_SIZE = (VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT - VK_DESCRIPTOR_TYPE_SAMPLER + 1),
+ VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR = 1000165000,
+ VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR,
VK_DESCRIPTOR_TYPE_MAX_ENUM = 0x7FFFFFFF
} VkDescriptorType;
@@ -1373,118 +1421,98 @@ typedef enum VkAttachmentLoadOp {
VK_ATTACHMENT_LOAD_OP_LOAD = 0,
VK_ATTACHMENT_LOAD_OP_CLEAR = 1,
VK_ATTACHMENT_LOAD_OP_DONT_CARE = 2,
- VK_ATTACHMENT_LOAD_OP_BEGIN_RANGE = VK_ATTACHMENT_LOAD_OP_LOAD,
- VK_ATTACHMENT_LOAD_OP_END_RANGE = VK_ATTACHMENT_LOAD_OP_DONT_CARE,
- VK_ATTACHMENT_LOAD_OP_RANGE_SIZE = (VK_ATTACHMENT_LOAD_OP_DONT_CARE - VK_ATTACHMENT_LOAD_OP_LOAD + 1),
VK_ATTACHMENT_LOAD_OP_MAX_ENUM = 0x7FFFFFFF
} VkAttachmentLoadOp;
typedef enum VkAttachmentStoreOp {
VK_ATTACHMENT_STORE_OP_STORE = 0,
VK_ATTACHMENT_STORE_OP_DONT_CARE = 1,
- VK_ATTACHMENT_STORE_OP_BEGIN_RANGE = VK_ATTACHMENT_STORE_OP_STORE,
- VK_ATTACHMENT_STORE_OP_END_RANGE = VK_ATTACHMENT_STORE_OP_DONT_CARE,
- VK_ATTACHMENT_STORE_OP_RANGE_SIZE = (VK_ATTACHMENT_STORE_OP_DONT_CARE - VK_ATTACHMENT_STORE_OP_STORE + 1),
+ VK_ATTACHMENT_STORE_OP_NONE_QCOM = 1000301000,
VK_ATTACHMENT_STORE_OP_MAX_ENUM = 0x7FFFFFFF
} VkAttachmentStoreOp;
typedef enum VkPipelineBindPoint {
VK_PIPELINE_BIND_POINT_GRAPHICS = 0,
VK_PIPELINE_BIND_POINT_COMPUTE = 1,
- VK_PIPELINE_BIND_POINT_RAY_TRACING_NV = 1000165000,
- VK_PIPELINE_BIND_POINT_BEGIN_RANGE = VK_PIPELINE_BIND_POINT_GRAPHICS,
- VK_PIPELINE_BIND_POINT_END_RANGE = VK_PIPELINE_BIND_POINT_COMPUTE,
- VK_PIPELINE_BIND_POINT_RANGE_SIZE = (VK_PIPELINE_BIND_POINT_COMPUTE - VK_PIPELINE_BIND_POINT_GRAPHICS + 1),
+ VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR = 1000165000,
+ VK_PIPELINE_BIND_POINT_RAY_TRACING_NV = VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR,
VK_PIPELINE_BIND_POINT_MAX_ENUM = 0x7FFFFFFF
} VkPipelineBindPoint;
typedef enum VkCommandBufferLevel {
VK_COMMAND_BUFFER_LEVEL_PRIMARY = 0,
VK_COMMAND_BUFFER_LEVEL_SECONDARY = 1,
- VK_COMMAND_BUFFER_LEVEL_BEGIN_RANGE = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
- VK_COMMAND_BUFFER_LEVEL_END_RANGE = VK_COMMAND_BUFFER_LEVEL_SECONDARY,
- VK_COMMAND_BUFFER_LEVEL_RANGE_SIZE = (VK_COMMAND_BUFFER_LEVEL_SECONDARY - VK_COMMAND_BUFFER_LEVEL_PRIMARY + 1),
VK_COMMAND_BUFFER_LEVEL_MAX_ENUM = 0x7FFFFFFF
} VkCommandBufferLevel;
typedef enum VkIndexType {
VK_INDEX_TYPE_UINT16 = 0,
VK_INDEX_TYPE_UINT32 = 1,
- VK_INDEX_TYPE_NONE_NV = 1000165000,
+ VK_INDEX_TYPE_NONE_KHR = 1000165000,
VK_INDEX_TYPE_UINT8_EXT = 1000265000,
- VK_INDEX_TYPE_BEGIN_RANGE = VK_INDEX_TYPE_UINT16,
- VK_INDEX_TYPE_END_RANGE = VK_INDEX_TYPE_UINT32,
- VK_INDEX_TYPE_RANGE_SIZE = (VK_INDEX_TYPE_UINT32 - VK_INDEX_TYPE_UINT16 + 1),
+ VK_INDEX_TYPE_NONE_NV = VK_INDEX_TYPE_NONE_KHR,
VK_INDEX_TYPE_MAX_ENUM = 0x7FFFFFFF
} VkIndexType;
typedef enum VkSubpassContents {
VK_SUBPASS_CONTENTS_INLINE = 0,
VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS = 1,
- VK_SUBPASS_CONTENTS_BEGIN_RANGE = VK_SUBPASS_CONTENTS_INLINE,
- VK_SUBPASS_CONTENTS_END_RANGE = VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS,
- VK_SUBPASS_CONTENTS_RANGE_SIZE = (VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS - VK_SUBPASS_CONTENTS_INLINE + 1),
VK_SUBPASS_CONTENTS_MAX_ENUM = 0x7FFFFFFF
} VkSubpassContents;
-typedef enum VkObjectType {
- VK_OBJECT_TYPE_UNKNOWN = 0,
- VK_OBJECT_TYPE_INSTANCE = 1,
- VK_OBJECT_TYPE_PHYSICAL_DEVICE = 2,
- VK_OBJECT_TYPE_DEVICE = 3,
- VK_OBJECT_TYPE_QUEUE = 4,
- VK_OBJECT_TYPE_SEMAPHORE = 5,
- VK_OBJECT_TYPE_COMMAND_BUFFER = 6,
- VK_OBJECT_TYPE_FENCE = 7,
- VK_OBJECT_TYPE_DEVICE_MEMORY = 8,
- VK_OBJECT_TYPE_BUFFER = 9,
- VK_OBJECT_TYPE_IMAGE = 10,
- VK_OBJECT_TYPE_EVENT = 11,
- VK_OBJECT_TYPE_QUERY_POOL = 12,
- VK_OBJECT_TYPE_BUFFER_VIEW = 13,
- VK_OBJECT_TYPE_IMAGE_VIEW = 14,
- VK_OBJECT_TYPE_SHADER_MODULE = 15,
- VK_OBJECT_TYPE_PIPELINE_CACHE = 16,
- VK_OBJECT_TYPE_PIPELINE_LAYOUT = 17,
- VK_OBJECT_TYPE_RENDER_PASS = 18,
- VK_OBJECT_TYPE_PIPELINE = 19,
- VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT = 20,
- VK_OBJECT_TYPE_SAMPLER = 21,
- VK_OBJECT_TYPE_DESCRIPTOR_POOL = 22,
- VK_OBJECT_TYPE_DESCRIPTOR_SET = 23,
- VK_OBJECT_TYPE_FRAMEBUFFER = 24,
- VK_OBJECT_TYPE_COMMAND_POOL = 25,
- VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION = 1000156000,
- VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE = 1000085000,
- VK_OBJECT_TYPE_SURFACE_KHR = 1000000000,
- VK_OBJECT_TYPE_SWAPCHAIN_KHR = 1000001000,
- VK_OBJECT_TYPE_DISPLAY_KHR = 1000002000,
- VK_OBJECT_TYPE_DISPLAY_MODE_KHR = 1000002001,
- VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT = 1000011000,
- VK_OBJECT_TYPE_OBJECT_TABLE_NVX = 1000086000,
- VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX = 1000086001,
- VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT = 1000128000,
- VK_OBJECT_TYPE_VALIDATION_CACHE_EXT = 1000160000,
- VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV = 1000165000,
- VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL = 1000210000,
- VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE,
- VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION,
- VK_OBJECT_TYPE_BEGIN_RANGE = VK_OBJECT_TYPE_UNKNOWN,
- VK_OBJECT_TYPE_END_RANGE = VK_OBJECT_TYPE_COMMAND_POOL,
- VK_OBJECT_TYPE_RANGE_SIZE = (VK_OBJECT_TYPE_COMMAND_POOL - VK_OBJECT_TYPE_UNKNOWN + 1),
- VK_OBJECT_TYPE_MAX_ENUM = 0x7FFFFFFF
-} VkObjectType;
+typedef enum VkAccessFlagBits {
+ VK_ACCESS_INDIRECT_COMMAND_READ_BIT = 0x00000001,
+ VK_ACCESS_INDEX_READ_BIT = 0x00000002,
+ VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT = 0x00000004,
+ VK_ACCESS_UNIFORM_READ_BIT = 0x00000008,
+ VK_ACCESS_INPUT_ATTACHMENT_READ_BIT = 0x00000010,
+ VK_ACCESS_SHADER_READ_BIT = 0x00000020,
+ VK_ACCESS_SHADER_WRITE_BIT = 0x00000040,
+ VK_ACCESS_COLOR_ATTACHMENT_READ_BIT = 0x00000080,
+ VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT = 0x00000100,
+ VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT = 0x00000200,
+ VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT = 0x00000400,
+ VK_ACCESS_TRANSFER_READ_BIT = 0x00000800,
+ VK_ACCESS_TRANSFER_WRITE_BIT = 0x00001000,
+ VK_ACCESS_HOST_READ_BIT = 0x00002000,
+ VK_ACCESS_HOST_WRITE_BIT = 0x00004000,
+ VK_ACCESS_MEMORY_READ_BIT = 0x00008000,
+ VK_ACCESS_MEMORY_WRITE_BIT = 0x00010000,
+ VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT = 0x02000000,
+ VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT = 0x04000000,
+ VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT = 0x08000000,
+ VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT = 0x00100000,
+ VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT = 0x00080000,
+ VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR = 0x00200000,
+ VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR = 0x00400000,
+ VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV = 0x00800000,
+ VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT = 0x01000000,
+ VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_NV = 0x00020000,
+ VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_NV = 0x00040000,
+ VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR,
+ VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR,
+ VK_ACCESS_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkAccessFlagBits;
+typedef VkFlags VkAccessFlags;
-typedef enum VkVendorId {
- VK_VENDOR_ID_VIV = 0x10001,
- VK_VENDOR_ID_VSI = 0x10002,
- VK_VENDOR_ID_KAZAN = 0x10003,
- VK_VENDOR_ID_BEGIN_RANGE = VK_VENDOR_ID_VIV,
- VK_VENDOR_ID_END_RANGE = VK_VENDOR_ID_KAZAN,
- VK_VENDOR_ID_RANGE_SIZE = (VK_VENDOR_ID_KAZAN - VK_VENDOR_ID_VIV + 1),
- VK_VENDOR_ID_MAX_ENUM = 0x7FFFFFFF
-} VkVendorId;
-typedef VkFlags VkInstanceCreateFlags;
+typedef enum VkImageAspectFlagBits {
+ VK_IMAGE_ASPECT_COLOR_BIT = 0x00000001,
+ VK_IMAGE_ASPECT_DEPTH_BIT = 0x00000002,
+ VK_IMAGE_ASPECT_STENCIL_BIT = 0x00000004,
+ VK_IMAGE_ASPECT_METADATA_BIT = 0x00000008,
+ VK_IMAGE_ASPECT_PLANE_0_BIT = 0x00000010,
+ VK_IMAGE_ASPECT_PLANE_1_BIT = 0x00000020,
+ VK_IMAGE_ASPECT_PLANE_2_BIT = 0x00000040,
+ VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT = 0x00000080,
+ VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT = 0x00000100,
+ VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT = 0x00000200,
+ VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT = 0x00000400,
+ VK_IMAGE_ASPECT_PLANE_0_BIT_KHR = VK_IMAGE_ASPECT_PLANE_0_BIT,
+ VK_IMAGE_ASPECT_PLANE_1_BIT_KHR = VK_IMAGE_ASPECT_PLANE_1_BIT,
+ VK_IMAGE_ASPECT_PLANE_2_BIT_KHR = VK_IMAGE_ASPECT_PLANE_2_BIT,
+ VK_IMAGE_ASPECT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkImageAspectFlagBits;
+typedef VkFlags VkImageAspectFlags;
typedef enum VkFormatFeatureFlagBits {
VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT = 0x00000001,
@@ -1511,6 +1539,7 @@ typedef enum VkFormatFeatureFlagBits {
VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT = 0x00800000,
VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT = 0x00010000,
VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG = 0x00002000,
+ VK_FORMAT_FEATURE_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR = 0x20000000,
VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT = 0x01000000,
VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT,
VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT,
@@ -1527,21 +1556,6 @@ typedef enum VkFormatFeatureFlagBits {
} VkFormatFeatureFlagBits;
typedef VkFlags VkFormatFeatureFlags;
-typedef enum VkImageUsageFlagBits {
- VK_IMAGE_USAGE_TRANSFER_SRC_BIT = 0x00000001,
- VK_IMAGE_USAGE_TRANSFER_DST_BIT = 0x00000002,
- VK_IMAGE_USAGE_SAMPLED_BIT = 0x00000004,
- VK_IMAGE_USAGE_STORAGE_BIT = 0x00000008,
- VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT = 0x00000010,
- VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT = 0x00000020,
- VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT = 0x00000040,
- VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT = 0x00000080,
- VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV = 0x00000100,
- VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT = 0x00000200,
- VK_IMAGE_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
-} VkImageUsageFlagBits;
-typedef VkFlags VkImageUsageFlags;
-
typedef enum VkImageCreateFlagBits {
VK_IMAGE_CREATE_SPARSE_BINDING_BIT = 0x00000001,
VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT = 0x00000002,
@@ -1580,15 +1594,29 @@ typedef enum VkSampleCountFlagBits {
} VkSampleCountFlagBits;
typedef VkFlags VkSampleCountFlags;
-typedef enum VkQueueFlagBits {
- VK_QUEUE_GRAPHICS_BIT = 0x00000001,
- VK_QUEUE_COMPUTE_BIT = 0x00000002,
- VK_QUEUE_TRANSFER_BIT = 0x00000004,
- VK_QUEUE_SPARSE_BINDING_BIT = 0x00000008,
- VK_QUEUE_PROTECTED_BIT = 0x00000010,
- VK_QUEUE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
-} VkQueueFlagBits;
-typedef VkFlags VkQueueFlags;
+typedef enum VkImageUsageFlagBits {
+ VK_IMAGE_USAGE_TRANSFER_SRC_BIT = 0x00000001,
+ VK_IMAGE_USAGE_TRANSFER_DST_BIT = 0x00000002,
+ VK_IMAGE_USAGE_SAMPLED_BIT = 0x00000004,
+ VK_IMAGE_USAGE_STORAGE_BIT = 0x00000008,
+ VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT = 0x00000010,
+ VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT = 0x00000020,
+ VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT = 0x00000040,
+ VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT = 0x00000080,
+ VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV = 0x00000100,
+ VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT = 0x00000200,
+ VK_IMAGE_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkImageUsageFlagBits;
+typedef VkFlags VkImageUsageFlags;
+typedef VkFlags VkInstanceCreateFlags;
+
+typedef enum VkMemoryHeapFlagBits {
+ VK_MEMORY_HEAP_DEVICE_LOCAL_BIT = 0x00000001,
+ VK_MEMORY_HEAP_MULTI_INSTANCE_BIT = 0x00000002,
+ VK_MEMORY_HEAP_MULTI_INSTANCE_BIT_KHR = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT,
+ VK_MEMORY_HEAP_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkMemoryHeapFlagBits;
+typedef VkFlags VkMemoryHeapFlags;
typedef enum VkMemoryPropertyFlagBits {
VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT = 0x00000001,
@@ -1603,13 +1631,15 @@ typedef enum VkMemoryPropertyFlagBits {
} VkMemoryPropertyFlagBits;
typedef VkFlags VkMemoryPropertyFlags;
-typedef enum VkMemoryHeapFlagBits {
- VK_MEMORY_HEAP_DEVICE_LOCAL_BIT = 0x00000001,
- VK_MEMORY_HEAP_MULTI_INSTANCE_BIT = 0x00000002,
- VK_MEMORY_HEAP_MULTI_INSTANCE_BIT_KHR = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT,
- VK_MEMORY_HEAP_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
-} VkMemoryHeapFlagBits;
-typedef VkFlags VkMemoryHeapFlags;
+typedef enum VkQueueFlagBits {
+ VK_QUEUE_GRAPHICS_BIT = 0x00000001,
+ VK_QUEUE_COMPUTE_BIT = 0x00000002,
+ VK_QUEUE_TRANSFER_BIT = 0x00000004,
+ VK_QUEUE_SPARSE_BINDING_BIT = 0x00000008,
+ VK_QUEUE_PROTECTED_BIT = 0x00000010,
+ VK_QUEUE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkQueueFlagBits;
+typedef VkFlags VkQueueFlags;
typedef VkFlags VkDeviceCreateFlags;
typedef enum VkDeviceQueueCreateFlagBits {
@@ -1638,36 +1668,25 @@ typedef enum VkPipelineStageFlagBits {
VK_PIPELINE_STAGE_ALL_COMMANDS_BIT = 0x00010000,
VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT = 0x01000000,
VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT = 0x00040000,
- VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX = 0x00020000,
+ VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR = 0x00200000,
+ VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR = 0x02000000,
VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV = 0x00400000,
- VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV = 0x00200000,
- VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV = 0x02000000,
VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV = 0x00080000,
VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV = 0x00100000,
VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT = 0x00800000,
+ VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV = 0x00020000,
+ VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR,
+ VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR,
VK_PIPELINE_STAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
} VkPipelineStageFlagBits;
typedef VkFlags VkPipelineStageFlags;
typedef VkFlags VkMemoryMapFlags;
-typedef enum VkImageAspectFlagBits {
- VK_IMAGE_ASPECT_COLOR_BIT = 0x00000001,
- VK_IMAGE_ASPECT_DEPTH_BIT = 0x00000002,
- VK_IMAGE_ASPECT_STENCIL_BIT = 0x00000004,
- VK_IMAGE_ASPECT_METADATA_BIT = 0x00000008,
- VK_IMAGE_ASPECT_PLANE_0_BIT = 0x00000010,
- VK_IMAGE_ASPECT_PLANE_1_BIT = 0x00000020,
- VK_IMAGE_ASPECT_PLANE_2_BIT = 0x00000040,
- VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT = 0x00000080,
- VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT = 0x00000100,
- VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT = 0x00000200,
- VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT = 0x00000400,
- VK_IMAGE_ASPECT_PLANE_0_BIT_KHR = VK_IMAGE_ASPECT_PLANE_0_BIT,
- VK_IMAGE_ASPECT_PLANE_1_BIT_KHR = VK_IMAGE_ASPECT_PLANE_1_BIT,
- VK_IMAGE_ASPECT_PLANE_2_BIT_KHR = VK_IMAGE_ASPECT_PLANE_2_BIT,
- VK_IMAGE_ASPECT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
-} VkImageAspectFlagBits;
-typedef VkFlags VkImageAspectFlags;
+typedef enum VkSparseMemoryBindFlagBits {
+ VK_SPARSE_MEMORY_BIND_METADATA_BIT = 0x00000001,
+ VK_SPARSE_MEMORY_BIND_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkSparseMemoryBindFlagBits;
+typedef VkFlags VkSparseMemoryBindFlags;
typedef enum VkSparseImageFormatFlagBits {
VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT = 0x00000001,
@@ -1677,12 +1696,6 @@ typedef enum VkSparseImageFormatFlagBits {
} VkSparseImageFormatFlagBits;
typedef VkFlags VkSparseImageFormatFlags;
-typedef enum VkSparseMemoryBindFlagBits {
- VK_SPARSE_MEMORY_BIND_METADATA_BIT = 0x00000001,
- VK_SPARSE_MEMORY_BIND_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
-} VkSparseMemoryBindFlagBits;
-typedef VkFlags VkSparseMemoryBindFlags;
-
typedef enum VkFenceCreateFlagBits {
VK_FENCE_CREATE_SIGNALED_BIT = 0x00000001,
VK_FENCE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
@@ -1690,7 +1703,6 @@ typedef enum VkFenceCreateFlagBits {
typedef VkFlags VkFenceCreateFlags;
typedef VkFlags VkSemaphoreCreateFlags;
typedef VkFlags VkEventCreateFlags;
-typedef VkFlags VkQueryPoolCreateFlags;
typedef enum VkQueryPipelineStatisticFlagBits {
VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT = 0x00000001,
@@ -1707,6 +1719,7 @@ typedef enum VkQueryPipelineStatisticFlagBits {
VK_QUERY_PIPELINE_STATISTIC_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
} VkQueryPipelineStatisticFlagBits;
typedef VkFlags VkQueryPipelineStatisticFlags;
+typedef VkFlags VkQueryPoolCreateFlags;
typedef enum VkQueryResultFlagBits {
VK_QUERY_RESULT_64_BIT = 0x00000001,
@@ -1743,7 +1756,8 @@ typedef enum VkBufferUsageFlagBits {
VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT = 0x00000800,
VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT = 0x00001000,
VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT = 0x00000200,
- VK_BUFFER_USAGE_RAY_TRACING_BIT_NV = 0x00000400,
+ VK_BUFFER_USAGE_RAY_TRACING_BIT_KHR = 0x00000400,
+ VK_BUFFER_USAGE_RAY_TRACING_BIT_NV = VK_BUFFER_USAGE_RAY_TRACING_BIT_KHR,
VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT,
VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT,
VK_BUFFER_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
@@ -1753,6 +1767,7 @@ typedef VkFlags VkBufferViewCreateFlags;
typedef enum VkImageViewCreateFlagBits {
VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT = 0x00000001,
+ VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DEFERRED_BIT_EXT = 0x00000002,
VK_IMAGE_VIEW_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
} VkImageViewCreateFlagBits;
typedef VkFlags VkImageViewCreateFlags;
@@ -1761,17 +1776,41 @@ typedef enum VkShaderModuleCreateFlagBits {
VK_SHADER_MODULE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
} VkShaderModuleCreateFlagBits;
typedef VkFlags VkShaderModuleCreateFlags;
+
+typedef enum VkPipelineCacheCreateFlagBits {
+ VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT_EXT = 0x00000001,
+ VK_PIPELINE_CACHE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkPipelineCacheCreateFlagBits;
typedef VkFlags VkPipelineCacheCreateFlags;
+typedef enum VkColorComponentFlagBits {
+ VK_COLOR_COMPONENT_R_BIT = 0x00000001,
+ VK_COLOR_COMPONENT_G_BIT = 0x00000002,
+ VK_COLOR_COMPONENT_B_BIT = 0x00000004,
+ VK_COLOR_COMPONENT_A_BIT = 0x00000008,
+ VK_COLOR_COMPONENT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkColorComponentFlagBits;
+typedef VkFlags VkColorComponentFlags;
+
typedef enum VkPipelineCreateFlagBits {
VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT = 0x00000001,
VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT = 0x00000002,
VK_PIPELINE_CREATE_DERIVATIVE_BIT = 0x00000004,
VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT = 0x00000008,
VK_PIPELINE_CREATE_DISPATCH_BASE_BIT = 0x00000010,
+ VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR = 0x00004000,
+ VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR = 0x00008000,
+ VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR = 0x00010000,
+ VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR = 0x00020000,
+ VK_PIPELINE_CREATE_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR = 0x00001000,
+ VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR = 0x00002000,
VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV = 0x00000020,
VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR = 0x00000040,
VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR = 0x00000080,
+ VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV = 0x00040000,
+ VK_PIPELINE_CREATE_LIBRARY_BIT_KHR = 0x00000800,
+ VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT = 0x00000100,
+ VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT = 0x00000200,
VK_PIPELINE_CREATE_DISPATCH_BASE = VK_PIPELINE_CREATE_DISPATCH_BASE_BIT,
VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT,
VK_PIPELINE_CREATE_DISPATCH_BASE_KHR = VK_PIPELINE_CREATE_DISPATCH_BASE,
@@ -1795,21 +1834,22 @@ typedef enum VkShaderStageFlagBits {
VK_SHADER_STAGE_COMPUTE_BIT = 0x00000020,
VK_SHADER_STAGE_ALL_GRAPHICS = 0x0000001F,
VK_SHADER_STAGE_ALL = 0x7FFFFFFF,
- VK_SHADER_STAGE_RAYGEN_BIT_NV = 0x00000100,
- VK_SHADER_STAGE_ANY_HIT_BIT_NV = 0x00000200,
- VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV = 0x00000400,
- VK_SHADER_STAGE_MISS_BIT_NV = 0x00000800,
- VK_SHADER_STAGE_INTERSECTION_BIT_NV = 0x00001000,
- VK_SHADER_STAGE_CALLABLE_BIT_NV = 0x00002000,
+ VK_SHADER_STAGE_RAYGEN_BIT_KHR = 0x00000100,
+ VK_SHADER_STAGE_ANY_HIT_BIT_KHR = 0x00000200,
+ VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR = 0x00000400,
+ VK_SHADER_STAGE_MISS_BIT_KHR = 0x00000800,
+ VK_SHADER_STAGE_INTERSECTION_BIT_KHR = 0x00001000,
+ VK_SHADER_STAGE_CALLABLE_BIT_KHR = 0x00002000,
VK_SHADER_STAGE_TASK_BIT_NV = 0x00000040,
VK_SHADER_STAGE_MESH_BIT_NV = 0x00000080,
+ VK_SHADER_STAGE_RAYGEN_BIT_NV = VK_SHADER_STAGE_RAYGEN_BIT_KHR,
+ VK_SHADER_STAGE_ANY_HIT_BIT_NV = VK_SHADER_STAGE_ANY_HIT_BIT_KHR,
+ VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV = VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR,
+ VK_SHADER_STAGE_MISS_BIT_NV = VK_SHADER_STAGE_MISS_BIT_KHR,
+ VK_SHADER_STAGE_INTERSECTION_BIT_NV = VK_SHADER_STAGE_INTERSECTION_BIT_KHR,
+ VK_SHADER_STAGE_CALLABLE_BIT_NV = VK_SHADER_STAGE_CALLABLE_BIT_KHR,
VK_SHADER_STAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
} VkShaderStageFlagBits;
-typedef VkFlags VkPipelineVertexInputStateCreateFlags;
-typedef VkFlags VkPipelineInputAssemblyStateCreateFlags;
-typedef VkFlags VkPipelineTessellationStateCreateFlags;
-typedef VkFlags VkPipelineViewportStateCreateFlags;
-typedef VkFlags VkPipelineRasterizationStateCreateFlags;
typedef enum VkCullModeFlagBits {
VK_CULL_MODE_NONE = 0,
@@ -1819,18 +1859,14 @@ typedef enum VkCullModeFlagBits {
VK_CULL_MODE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
} VkCullModeFlagBits;
typedef VkFlags VkCullModeFlags;
+typedef VkFlags VkPipelineVertexInputStateCreateFlags;
+typedef VkFlags VkPipelineInputAssemblyStateCreateFlags;
+typedef VkFlags VkPipelineTessellationStateCreateFlags;
+typedef VkFlags VkPipelineViewportStateCreateFlags;
+typedef VkFlags VkPipelineRasterizationStateCreateFlags;
typedef VkFlags VkPipelineMultisampleStateCreateFlags;
typedef VkFlags VkPipelineDepthStencilStateCreateFlags;
typedef VkFlags VkPipelineColorBlendStateCreateFlags;
-
-typedef enum VkColorComponentFlagBits {
- VK_COLOR_COMPONENT_R_BIT = 0x00000001,
- VK_COLOR_COMPONENT_G_BIT = 0x00000002,
- VK_COLOR_COMPONENT_B_BIT = 0x00000004,
- VK_COLOR_COMPONENT_A_BIT = 0x00000008,
- VK_COLOR_COMPONENT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
-} VkColorComponentFlagBits;
-typedef VkFlags VkColorComponentFlags;
typedef VkFlags VkPipelineDynamicStateCreateFlags;
typedef VkFlags VkPipelineLayoutCreateFlags;
typedef VkFlags VkShaderStageFlags;
@@ -1842,14 +1878,6 @@ typedef enum VkSamplerCreateFlagBits {
} VkSamplerCreateFlagBits;
typedef VkFlags VkSamplerCreateFlags;
-typedef enum VkDescriptorSetLayoutCreateFlagBits {
- VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT = 0x00000002,
- VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR = 0x00000001,
- VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT,
- VK_DESCRIPTOR_SET_LAYOUT_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
-} VkDescriptorSetLayoutCreateFlagBits;
-typedef VkFlags VkDescriptorSetLayoutCreateFlags;
-
typedef enum VkDescriptorPoolCreateFlagBits {
VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT = 0x00000001,
VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT = 0x00000002,
@@ -1859,6 +1887,30 @@ typedef enum VkDescriptorPoolCreateFlagBits {
typedef VkFlags VkDescriptorPoolCreateFlags;
typedef VkFlags VkDescriptorPoolResetFlags;
+typedef enum VkDescriptorSetLayoutCreateFlagBits {
+ VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT = 0x00000002,
+ VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR = 0x00000001,
+ VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT,
+ VK_DESCRIPTOR_SET_LAYOUT_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkDescriptorSetLayoutCreateFlagBits;
+typedef VkFlags VkDescriptorSetLayoutCreateFlags;
+
+typedef enum VkAttachmentDescriptionFlagBits {
+ VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT = 0x00000001,
+ VK_ATTACHMENT_DESCRIPTION_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkAttachmentDescriptionFlagBits;
+typedef VkFlags VkAttachmentDescriptionFlags;
+
+typedef enum VkDependencyFlagBits {
+ VK_DEPENDENCY_BY_REGION_BIT = 0x00000001,
+ VK_DEPENDENCY_DEVICE_GROUP_BIT = 0x00000004,
+ VK_DEPENDENCY_VIEW_LOCAL_BIT = 0x00000002,
+ VK_DEPENDENCY_VIEW_LOCAL_BIT_KHR = VK_DEPENDENCY_VIEW_LOCAL_BIT,
+ VK_DEPENDENCY_DEVICE_GROUP_BIT_KHR = VK_DEPENDENCY_DEVICE_GROUP_BIT,
+ VK_DEPENDENCY_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkDependencyFlagBits;
+typedef VkFlags VkDependencyFlags;
+
typedef enum VkFramebufferCreateFlagBits {
VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT = 0x00000001,
VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT,
@@ -1867,66 +1919,20 @@ typedef enum VkFramebufferCreateFlagBits {
typedef VkFlags VkFramebufferCreateFlags;
typedef enum VkRenderPassCreateFlagBits {
+ VK_RENDER_PASS_CREATE_TRANSFORM_BIT_QCOM = 0x00000002,
VK_RENDER_PASS_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
} VkRenderPassCreateFlagBits;
typedef VkFlags VkRenderPassCreateFlags;
-typedef enum VkAttachmentDescriptionFlagBits {
- VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT = 0x00000001,
- VK_ATTACHMENT_DESCRIPTION_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
-} VkAttachmentDescriptionFlagBits;
-typedef VkFlags VkAttachmentDescriptionFlags;
-
typedef enum VkSubpassDescriptionFlagBits {
VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX = 0x00000001,
VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX = 0x00000002,
+ VK_SUBPASS_DESCRIPTION_FRAGMENT_REGION_BIT_QCOM = 0x00000004,
+ VK_SUBPASS_DESCRIPTION_SHADER_RESOLVE_BIT_QCOM = 0x00000008,
VK_SUBPASS_DESCRIPTION_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
} VkSubpassDescriptionFlagBits;
typedef VkFlags VkSubpassDescriptionFlags;
-typedef enum VkAccessFlagBits {
- VK_ACCESS_INDIRECT_COMMAND_READ_BIT = 0x00000001,
- VK_ACCESS_INDEX_READ_BIT = 0x00000002,
- VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT = 0x00000004,
- VK_ACCESS_UNIFORM_READ_BIT = 0x00000008,
- VK_ACCESS_INPUT_ATTACHMENT_READ_BIT = 0x00000010,
- VK_ACCESS_SHADER_READ_BIT = 0x00000020,
- VK_ACCESS_SHADER_WRITE_BIT = 0x00000040,
- VK_ACCESS_COLOR_ATTACHMENT_READ_BIT = 0x00000080,
- VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT = 0x00000100,
- VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT = 0x00000200,
- VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT = 0x00000400,
- VK_ACCESS_TRANSFER_READ_BIT = 0x00000800,
- VK_ACCESS_TRANSFER_WRITE_BIT = 0x00001000,
- VK_ACCESS_HOST_READ_BIT = 0x00002000,
- VK_ACCESS_HOST_WRITE_BIT = 0x00004000,
- VK_ACCESS_MEMORY_READ_BIT = 0x00008000,
- VK_ACCESS_MEMORY_WRITE_BIT = 0x00010000,
- VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT = 0x02000000,
- VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT = 0x04000000,
- VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT = 0x08000000,
- VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT = 0x00100000,
- VK_ACCESS_COMMAND_PROCESS_READ_BIT_NVX = 0x00020000,
- VK_ACCESS_COMMAND_PROCESS_WRITE_BIT_NVX = 0x00040000,
- VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT = 0x00080000,
- VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV = 0x00800000,
- VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV = 0x00200000,
- VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV = 0x00400000,
- VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT = 0x01000000,
- VK_ACCESS_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
-} VkAccessFlagBits;
-typedef VkFlags VkAccessFlags;
-
-typedef enum VkDependencyFlagBits {
- VK_DEPENDENCY_BY_REGION_BIT = 0x00000001,
- VK_DEPENDENCY_DEVICE_GROUP_BIT = 0x00000004,
- VK_DEPENDENCY_VIEW_LOCAL_BIT = 0x00000002,
- VK_DEPENDENCY_VIEW_LOCAL_BIT_KHR = VK_DEPENDENCY_VIEW_LOCAL_BIT,
- VK_DEPENDENCY_DEVICE_GROUP_BIT_KHR = VK_DEPENDENCY_DEVICE_GROUP_BIT,
- VK_DEPENDENCY_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
-} VkDependencyFlagBits;
-typedef VkFlags VkDependencyFlags;
-
typedef enum VkCommandPoolCreateFlagBits {
VK_COMMAND_POOL_CREATE_TRANSIENT_BIT = 0x00000001,
VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT = 0x00000002,
@@ -1969,36 +1975,106 @@ typedef enum VkStencilFaceFlagBits {
VK_STENCIL_FACE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
} VkStencilFaceFlagBits;
typedef VkFlags VkStencilFaceFlags;
-typedef struct VkApplicationInfo {
+typedef struct VkExtent2D {
+ uint32_t width;
+ uint32_t height;
+} VkExtent2D;
+
+typedef struct VkExtent3D {
+ uint32_t width;
+ uint32_t height;
+ uint32_t depth;
+} VkExtent3D;
+
+typedef struct VkOffset2D {
+ int32_t x;
+ int32_t y;
+} VkOffset2D;
+
+typedef struct VkOffset3D {
+ int32_t x;
+ int32_t y;
+ int32_t z;
+} VkOffset3D;
+
+typedef struct VkRect2D {
+ VkOffset2D offset;
+ VkExtent2D extent;
+} VkRect2D;
+
+typedef struct VkBaseInStructure {
+ VkStructureType sType;
+ const struct VkBaseInStructure* pNext;
+} VkBaseInStructure;
+
+typedef struct VkBaseOutStructure {
+ VkStructureType sType;
+ struct VkBaseOutStructure* pNext;
+} VkBaseOutStructure;
+
+typedef struct VkBufferMemoryBarrier {
VkStructureType sType;
const void* pNext;
- const char* pApplicationName;
- uint32_t applicationVersion;
- const char* pEngineName;
- uint32_t engineVersion;
- uint32_t apiVersion;
-} VkApplicationInfo;
+ VkAccessFlags srcAccessMask;
+ VkAccessFlags dstAccessMask;
+ uint32_t srcQueueFamilyIndex;
+ uint32_t dstQueueFamilyIndex;
+ VkBuffer buffer;
+ VkDeviceSize offset;
+ VkDeviceSize size;
+} VkBufferMemoryBarrier;
-typedef struct VkInstanceCreateInfo {
- VkStructureType sType;
- const void* pNext;
- VkInstanceCreateFlags flags;
- const VkApplicationInfo* pApplicationInfo;
- uint32_t enabledLayerCount;
- const char* const* ppEnabledLayerNames;
- uint32_t enabledExtensionCount;
- const char* const* ppEnabledExtensionNames;
-} VkInstanceCreateInfo;
+typedef struct VkDispatchIndirectCommand {
+ uint32_t x;
+ uint32_t y;
+ uint32_t z;
+} VkDispatchIndirectCommand;
-typedef void* (VKAPI_PTR *PFN_vkAllocationFunction)(
- void* pUserData,
- size_t size,
- size_t alignment,
- VkSystemAllocationScope allocationScope);
+typedef struct VkDrawIndexedIndirectCommand {
+ uint32_t indexCount;
+ uint32_t instanceCount;
+ uint32_t firstIndex;
+ int32_t vertexOffset;
+ uint32_t firstInstance;
+} VkDrawIndexedIndirectCommand;
-typedef void* (VKAPI_PTR *PFN_vkReallocationFunction)(
+typedef struct VkDrawIndirectCommand {
+ uint32_t vertexCount;
+ uint32_t instanceCount;
+ uint32_t firstVertex;
+ uint32_t firstInstance;
+} VkDrawIndirectCommand;
+
+typedef struct VkImageSubresourceRange {
+ VkImageAspectFlags aspectMask;
+ uint32_t baseMipLevel;
+ uint32_t levelCount;
+ uint32_t baseArrayLayer;
+ uint32_t layerCount;
+} VkImageSubresourceRange;
+
+typedef struct VkImageMemoryBarrier {
+ VkStructureType sType;
+ const void* pNext;
+ VkAccessFlags srcAccessMask;
+ VkAccessFlags dstAccessMask;
+ VkImageLayout oldLayout;
+ VkImageLayout newLayout;
+ uint32_t srcQueueFamilyIndex;
+ uint32_t dstQueueFamilyIndex;
+ VkImage image;
+ VkImageSubresourceRange subresourceRange;
+} VkImageMemoryBarrier;
+
+typedef struct VkMemoryBarrier {
+ VkStructureType sType;
+ const void* pNext;
+ VkAccessFlags srcAccessMask;
+ VkAccessFlags dstAccessMask;
+} VkMemoryBarrier;
+
+typedef void* (VKAPI_PTR *PFN_vkAllocationFunction)(
void* pUserData,
- void* pOriginal,
size_t size,
size_t alignment,
VkSystemAllocationScope allocationScope);
@@ -2019,6 +2095,14 @@ typedef void (VKAPI_PTR *PFN_vkInternalFreeNotification)(
VkInternalAllocationType allocationType,
VkSystemAllocationScope allocationScope);
+typedef void* (VKAPI_PTR *PFN_vkReallocationFunction)(
+ void* pUserData,
+ void* pOriginal,
+ size_t size,
+ size_t alignment,
+ VkSystemAllocationScope allocationScope);
+
+typedef void (VKAPI_PTR *PFN_vkVoidFunction)(void);
typedef struct VkAllocationCallbacks {
void* pUserData;
PFN_vkAllocationFunction pfnAllocation;
@@ -2028,6 +2112,51 @@ typedef struct VkAllocationCallbacks {
PFN_vkInternalFreeNotification pfnInternalFree;
} VkAllocationCallbacks;
+typedef struct VkApplicationInfo {
+ VkStructureType sType;
+ const void* pNext;
+ const char* pApplicationName;
+ uint32_t applicationVersion;
+ const char* pEngineName;
+ uint32_t engineVersion;
+ uint32_t apiVersion;
+} VkApplicationInfo;
+
+typedef struct VkFormatProperties {
+ VkFormatFeatureFlags linearTilingFeatures;
+ VkFormatFeatureFlags optimalTilingFeatures;
+ VkFormatFeatureFlags bufferFeatures;
+} VkFormatProperties;
+
+typedef struct VkImageFormatProperties {
+ VkExtent3D maxExtent;
+ uint32_t maxMipLevels;
+ uint32_t maxArrayLayers;
+ VkSampleCountFlags sampleCounts;
+ VkDeviceSize maxResourceSize;
+} VkImageFormatProperties;
+
+typedef struct VkInstanceCreateInfo {
+ VkStructureType sType;
+ const void* pNext;
+ VkInstanceCreateFlags flags;
+ const VkApplicationInfo* pApplicationInfo;
+ uint32_t enabledLayerCount;
+ const char* const* ppEnabledLayerNames;
+ uint32_t enabledExtensionCount;
+ const char* const* ppEnabledExtensionNames;
+} VkInstanceCreateInfo;
+
+typedef struct VkMemoryHeap {
+ VkDeviceSize size;
+ VkMemoryHeapFlags flags;
+} VkMemoryHeap;
+
+typedef struct VkMemoryType {
+ VkMemoryPropertyFlags propertyFlags;
+ uint32_t heapIndex;
+} VkMemoryType;
+
typedef struct VkPhysicalDeviceFeatures {
VkBool32 robustBufferAccess;
VkBool32 fullDrawIndexUint32;
@@ -2086,26 +2215,6 @@ typedef struct VkPhysicalDeviceFeatures {
VkBool32 inheritedQueries;
} VkPhysicalDeviceFeatures;
-typedef struct VkFormatProperties {
- VkFormatFeatureFlags linearTilingFeatures;
- VkFormatFeatureFlags optimalTilingFeatures;
- VkFormatFeatureFlags bufferFeatures;
-} VkFormatProperties;
-
-typedef struct VkExtent3D {
- uint32_t width;
- uint32_t height;
- uint32_t depth;
-} VkExtent3D;
-
-typedef struct VkImageFormatProperties {
- VkExtent3D maxExtent;
- uint32_t maxMipLevels;
- uint32_t maxArrayLayers;
- VkSampleCountFlags sampleCounts;
- VkDeviceSize maxResourceSize;
-} VkImageFormatProperties;
-
typedef struct VkPhysicalDeviceLimits {
uint32_t maxImageDimension1D;
uint32_t maxImageDimension2D;
@@ -2215,6 +2324,13 @@ typedef struct VkPhysicalDeviceLimits {
VkDeviceSize nonCoherentAtomSize;
} VkPhysicalDeviceLimits;
+typedef struct VkPhysicalDeviceMemoryProperties {
+ uint32_t memoryTypeCount;
+ VkMemoryType memoryTypes[VK_MAX_MEMORY_TYPES];
+ uint32_t memoryHeapCount;
+ VkMemoryHeap memoryHeaps[VK_MAX_MEMORY_HEAPS];
+} VkPhysicalDeviceMemoryProperties;
+
typedef struct VkPhysicalDeviceSparseProperties {
VkBool32 residencyStandard2DBlockShape;
VkBool32 residencyStandard2DMultisampleBlockShape;
@@ -2242,24 +2358,6 @@ typedef struct VkQueueFamilyProperties {
VkExtent3D minImageTransferGranularity;
} VkQueueFamilyProperties;
-typedef struct VkMemoryType {
- VkMemoryPropertyFlags propertyFlags;
- uint32_t heapIndex;
-} VkMemoryType;
-
-typedef struct VkMemoryHeap {
- VkDeviceSize size;
- VkMemoryHeapFlags flags;
-} VkMemoryHeap;
-
-typedef struct VkPhysicalDeviceMemoryProperties {
- uint32_t memoryTypeCount;
- VkMemoryType memoryTypes[VK_MAX_MEMORY_TYPES];
- uint32_t memoryHeapCount;
- VkMemoryHeap memoryHeaps[VK_MAX_MEMORY_HEAPS];
-} VkPhysicalDeviceMemoryProperties;
-
-typedef void (VKAPI_PTR *PFN_vkVoidFunction)(void);
typedef struct VkDeviceQueueCreateInfo {
VkStructureType sType;
const void* pNext;
@@ -2306,13 +2404,6 @@ typedef struct VkSubmitInfo {
const VkSemaphore* pSignalSemaphores;
} VkSubmitInfo;
-typedef struct VkMemoryAllocateInfo {
- VkStructureType sType;
- const void* pNext;
- VkDeviceSize allocationSize;
- uint32_t memoryTypeIndex;
-} VkMemoryAllocateInfo;
-
typedef struct VkMappedMemoryRange {
VkStructureType sType;
const void* pNext;
@@ -2321,26 +2412,19 @@ typedef struct VkMappedMemoryRange {
VkDeviceSize size;
} VkMappedMemoryRange;
+typedef struct VkMemoryAllocateInfo {
+ VkStructureType sType;
+ const void* pNext;
+ VkDeviceSize allocationSize;
+ uint32_t memoryTypeIndex;
+} VkMemoryAllocateInfo;
+
typedef struct VkMemoryRequirements {
VkDeviceSize size;
VkDeviceSize alignment;
uint32_t memoryTypeBits;
} VkMemoryRequirements;
-typedef struct VkSparseImageFormatProperties {
- VkImageAspectFlags aspectMask;
- VkExtent3D imageGranularity;
- VkSparseImageFormatFlags flags;
-} VkSparseImageFormatProperties;
-
-typedef struct VkSparseImageMemoryRequirements {
- VkSparseImageFormatProperties formatProperties;
- uint32_t imageMipTailFirstLod;
- VkDeviceSize imageMipTailSize;
- VkDeviceSize imageMipTailOffset;
- VkDeviceSize imageMipTailStride;
-} VkSparseImageMemoryRequirements;
-
typedef struct VkSparseMemoryBind {
VkDeviceSize resourceOffset;
VkDeviceSize size;
@@ -2367,12 +2451,6 @@ typedef struct VkImageSubresource {
uint32_t arrayLayer;
} VkImageSubresource;
-typedef struct VkOffset3D {
- int32_t x;
- int32_t y;
- int32_t z;
-} VkOffset3D;
-
typedef struct VkSparseImageMemoryBind {
VkImageSubresource subresource;
VkOffset3D offset;
@@ -2403,6 +2481,20 @@ typedef struct VkBindSparseInfo {
const VkSemaphore* pSignalSemaphores;
} VkBindSparseInfo;
+typedef struct VkSparseImageFormatProperties {
+ VkImageAspectFlags aspectMask;
+ VkExtent3D imageGranularity;
+ VkSparseImageFormatFlags flags;
+} VkSparseImageFormatProperties;
+
+typedef struct VkSparseImageMemoryRequirements {
+ VkSparseImageFormatProperties formatProperties;
+ uint32_t imageMipTailFirstLod;
+ VkDeviceSize imageMipTailSize;
+ VkDeviceSize imageMipTailOffset;
+ VkDeviceSize imageMipTailStride;
+} VkSparseImageMemoryRequirements;
+
typedef struct VkFenceCreateInfo {
VkStructureType sType;
const void* pNext;
@@ -2484,14 +2576,6 @@ typedef struct VkComponentMapping {
VkComponentSwizzle a;
} VkComponentMapping;
-typedef struct VkImageSubresourceRange {
- VkImageAspectFlags aspectMask;
- uint32_t baseMipLevel;
- uint32_t levelCount;
- uint32_t baseArrayLayer;
- uint32_t layerCount;
-} VkImageSubresourceRange;
-
typedef struct VkImageViewCreateInfo {
VkStructureType sType;
const void* pNext;
@@ -2542,6 +2626,16 @@ typedef struct VkPipelineShaderStageCreateInfo {
const VkSpecializationInfo* pSpecializationInfo;
} VkPipelineShaderStageCreateInfo;
+typedef struct VkComputePipelineCreateInfo {
+ VkStructureType sType;
+ const void* pNext;
+ VkPipelineCreateFlags flags;
+ VkPipelineShaderStageCreateInfo stage;
+ VkPipelineLayout layout;
+ VkPipeline basePipelineHandle;
+ int32_t basePipelineIndex;
+} VkComputePipelineCreateInfo;
+
typedef struct VkVertexInputBindingDescription {
uint32_t binding;
uint32_t stride;
@@ -2589,21 +2683,6 @@ typedef struct VkViewport {
float maxDepth;
} VkViewport;
-typedef struct VkOffset2D {
- int32_t x;
- int32_t y;
-} VkOffset2D;
-
-typedef struct VkExtent2D {
- uint32_t width;
- uint32_t height;
-} VkExtent2D;
-
-typedef struct VkRect2D {
- VkOffset2D offset;
- VkExtent2D extent;
-} VkRect2D;
-
typedef struct VkPipelineViewportStateCreateInfo {
VkStructureType sType;
const void* pNext;
@@ -2719,16 +2798,6 @@ typedef struct VkGraphicsPipelineCreateInfo {
int32_t basePipelineIndex;
} VkGraphicsPipelineCreateInfo;
-typedef struct VkComputePipelineCreateInfo {
- VkStructureType sType;
- const void* pNext;
- VkPipelineCreateFlags flags;
- VkPipelineShaderStageCreateInfo stage;
- VkPipelineLayout layout;
- VkPipeline basePipelineHandle;
- int32_t basePipelineIndex;
-} VkComputePipelineCreateInfo;
-
typedef struct VkPushConstantRange {
VkShaderStageFlags stageFlags;
uint32_t offset;
@@ -2766,21 +2835,29 @@ typedef struct VkSamplerCreateInfo {
VkBool32 unnormalizedCoordinates;
} VkSamplerCreateInfo;
-typedef struct VkDescriptorSetLayoutBinding {
- uint32_t binding;
- VkDescriptorType descriptorType;
- uint32_t descriptorCount;
- VkShaderStageFlags stageFlags;
- const VkSampler* pImmutableSamplers;
-} VkDescriptorSetLayoutBinding;
+typedef struct VkCopyDescriptorSet {
+ VkStructureType sType;
+ const void* pNext;
+ VkDescriptorSet srcSet;
+ uint32_t srcBinding;
+ uint32_t srcArrayElement;
+ VkDescriptorSet dstSet;
+ uint32_t dstBinding;
+ uint32_t dstArrayElement;
+ uint32_t descriptorCount;
+} VkCopyDescriptorSet;
-typedef struct VkDescriptorSetLayoutCreateInfo {
- VkStructureType sType;
- const void* pNext;
- VkDescriptorSetLayoutCreateFlags flags;
- uint32_t bindingCount;
- const VkDescriptorSetLayoutBinding* pBindings;
-} VkDescriptorSetLayoutCreateInfo;
+typedef struct VkDescriptorBufferInfo {
+ VkBuffer buffer;
+ VkDeviceSize offset;
+ VkDeviceSize range;
+} VkDescriptorBufferInfo;
+
+typedef struct VkDescriptorImageInfo {
+ VkSampler sampler;
+ VkImageView imageView;
+ VkImageLayout imageLayout;
+} VkDescriptorImageInfo;
typedef struct VkDescriptorPoolSize {
VkDescriptorType type;
@@ -2804,17 +2881,21 @@ typedef struct VkDescriptorSetAllocateInfo {
const VkDescriptorSetLayout* pSetLayouts;
} VkDescriptorSetAllocateInfo;
-typedef struct VkDescriptorImageInfo {
- VkSampler sampler;
- VkImageView imageView;
- VkImageLayout imageLayout;
-} VkDescriptorImageInfo;
+typedef struct VkDescriptorSetLayoutBinding {
+ uint32_t binding;
+ VkDescriptorType descriptorType;
+ uint32_t descriptorCount;
+ VkShaderStageFlags stageFlags;
+ const VkSampler* pImmutableSamplers;
+} VkDescriptorSetLayoutBinding;
-typedef struct VkDescriptorBufferInfo {
- VkBuffer buffer;
- VkDeviceSize offset;
- VkDeviceSize range;
-} VkDescriptorBufferInfo;
+typedef struct VkDescriptorSetLayoutCreateInfo {
+ VkStructureType sType;
+ const void* pNext;
+ VkDescriptorSetLayoutCreateFlags flags;
+ uint32_t bindingCount;
+ const VkDescriptorSetLayoutBinding* pBindings;
+} VkDescriptorSetLayoutCreateInfo;
typedef struct VkWriteDescriptorSet {
VkStructureType sType;
@@ -2829,30 +2910,6 @@ typedef struct VkWriteDescriptorSet {
const VkBufferView* pTexelBufferView;
} VkWriteDescriptorSet;
-typedef struct VkCopyDescriptorSet {
- VkStructureType sType;
- const void* pNext;
- VkDescriptorSet srcSet;
- uint32_t srcBinding;
- uint32_t srcArrayElement;
- VkDescriptorSet dstSet;
- uint32_t dstBinding;
- uint32_t dstArrayElement;
- uint32_t descriptorCount;
-} VkCopyDescriptorSet;
-
-typedef struct VkFramebufferCreateInfo {
- VkStructureType sType;
- const void* pNext;
- VkFramebufferCreateFlags flags;
- VkRenderPass renderPass;
- uint32_t attachmentCount;
- const VkImageView* pAttachments;
- uint32_t width;
- uint32_t height;
- uint32_t layers;
-} VkFramebufferCreateInfo;
-
typedef struct VkAttachmentDescription {
VkAttachmentDescriptionFlags flags;
VkFormat format;
@@ -2870,6 +2927,18 @@ typedef struct VkAttachmentReference {
VkImageLayout layout;
} VkAttachmentReference;
+typedef struct VkFramebufferCreateInfo {
+ VkStructureType sType;
+ const void* pNext;
+ VkFramebufferCreateFlags flags;
+ VkRenderPass renderPass;
+ uint32_t attachmentCount;
+ const VkImageView* pAttachments;
+ uint32_t width;
+ uint32_t height;
+ uint32_t layers;
+} VkFramebufferCreateInfo;
+
typedef struct VkSubpassDescription {
VkSubpassDescriptionFlags flags;
VkPipelineBindPoint pipelineBindPoint;
@@ -2951,21 +3020,6 @@ typedef struct VkImageSubresourceLayers {
uint32_t layerCount;
} VkImageSubresourceLayers;
-typedef struct VkImageCopy {
- VkImageSubresourceLayers srcSubresource;
- VkOffset3D srcOffset;
- VkImageSubresourceLayers dstSubresource;
- VkOffset3D dstOffset;
- VkExtent3D extent;
-} VkImageCopy;
-
-typedef struct VkImageBlit {
- VkImageSubresourceLayers srcSubresource;
- VkOffset3D srcOffsets[2];
- VkImageSubresourceLayers dstSubresource;
- VkOffset3D dstOffsets[2];
-} VkImageBlit;
-
typedef struct VkBufferImageCopy {
VkDeviceSize bufferOffset;
uint32_t bufferRowLength;
@@ -3003,6 +3057,21 @@ typedef struct VkClearRect {
uint32_t layerCount;
} VkClearRect;
+typedef struct VkImageBlit {
+ VkImageSubresourceLayers srcSubresource;
+ VkOffset3D srcOffsets[2];
+ VkImageSubresourceLayers dstSubresource;
+ VkOffset3D dstOffsets[2];
+} VkImageBlit;
+
+typedef struct VkImageCopy {
+ VkImageSubresourceLayers srcSubresource;
+ VkOffset3D srcOffset;
+ VkImageSubresourceLayers dstSubresource;
+ VkOffset3D dstOffset;
+ VkExtent3D extent;
+} VkImageCopy;
+
typedef struct VkImageResolve {
VkImageSubresourceLayers srcSubresource;
VkOffset3D srcOffset;
@@ -3011,38 +3080,6 @@ typedef struct VkImageResolve {
VkExtent3D extent;
} VkImageResolve;
-typedef struct VkMemoryBarrier {
- VkStructureType sType;
- const void* pNext;
- VkAccessFlags srcAccessMask;
- VkAccessFlags dstAccessMask;
-} VkMemoryBarrier;
-
-typedef struct VkBufferMemoryBarrier {
- VkStructureType sType;
- const void* pNext;
- VkAccessFlags srcAccessMask;
- VkAccessFlags dstAccessMask;
- uint32_t srcQueueFamilyIndex;
- uint32_t dstQueueFamilyIndex;
- VkBuffer buffer;
- VkDeviceSize offset;
- VkDeviceSize size;
-} VkBufferMemoryBarrier;
-
-typedef struct VkImageMemoryBarrier {
- VkStructureType sType;
- const void* pNext;
- VkAccessFlags srcAccessMask;
- VkAccessFlags dstAccessMask;
- VkImageLayout oldLayout;
- VkImageLayout newLayout;
- uint32_t srcQueueFamilyIndex;
- uint32_t dstQueueFamilyIndex;
- VkImage image;
- VkImageSubresourceRange subresourceRange;
-} VkImageMemoryBarrier;
-
typedef struct VkRenderPassBeginInfo {
VkStructureType sType;
const void* pNext;
@@ -3053,37 +3090,6 @@ typedef struct VkRenderPassBeginInfo {
const VkClearValue* pClearValues;
} VkRenderPassBeginInfo;
-typedef struct VkDispatchIndirectCommand {
- uint32_t x;
- uint32_t y;
- uint32_t z;
-} VkDispatchIndirectCommand;
-
-typedef struct VkDrawIndexedIndirectCommand {
- uint32_t indexCount;
- uint32_t instanceCount;
- uint32_t firstIndex;
- int32_t vertexOffset;
- uint32_t firstInstance;
-} VkDrawIndexedIndirectCommand;
-
-typedef struct VkDrawIndirectCommand {
- uint32_t vertexCount;
- uint32_t instanceCount;
- uint32_t firstVertex;
- uint32_t firstInstance;
-} VkDrawIndirectCommand;
-
-typedef struct VkBaseOutStructure {
- VkStructureType sType;
- struct VkBaseOutStructure* pNext;
-} VkBaseOutStructure;
-
-typedef struct VkBaseInStructure {
- VkStructureType sType;
- const struct VkBaseInStructure* pNext;
-} VkBaseInStructure;
-
typedef VkResult (VKAPI_PTR *PFN_vkCreateInstance)(const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkInstance* pInstance);
typedef void (VKAPI_PTR *PFN_vkDestroyInstance)(VkInstance instance, const VkAllocationCallbacks* pAllocator);
typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDevices)(VkInstance instance, uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices);
@@ -4037,9 +4043,6 @@ typedef enum VkPointClippingBehavior {
VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY = 1,
VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES_KHR = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES,
VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY_KHR = VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY,
- VK_POINT_CLIPPING_BEHAVIOR_BEGIN_RANGE = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES,
- VK_POINT_CLIPPING_BEHAVIOR_END_RANGE = VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY,
- VK_POINT_CLIPPING_BEHAVIOR_RANGE_SIZE = (VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY - VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES + 1),
VK_POINT_CLIPPING_BEHAVIOR_MAX_ENUM = 0x7FFFFFFF
} VkPointClippingBehavior;
@@ -4048,9 +4051,6 @@ typedef enum VkTessellationDomainOrigin {
VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT = 1,
VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT_KHR = VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT,
VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT_KHR = VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT,
- VK_TESSELLATION_DOMAIN_ORIGIN_BEGIN_RANGE = VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT,
- VK_TESSELLATION_DOMAIN_ORIGIN_END_RANGE = VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT,
- VK_TESSELLATION_DOMAIN_ORIGIN_RANGE_SIZE = (VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT - VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT + 1),
VK_TESSELLATION_DOMAIN_ORIGIN_MAX_ENUM = 0x7FFFFFFF
} VkTessellationDomainOrigin;
@@ -4065,9 +4065,6 @@ typedef enum VkSamplerYcbcrModelConversion {
VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709,
VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601,
VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020,
- VK_SAMPLER_YCBCR_MODEL_CONVERSION_BEGIN_RANGE = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY,
- VK_SAMPLER_YCBCR_MODEL_CONVERSION_END_RANGE = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020,
- VK_SAMPLER_YCBCR_MODEL_CONVERSION_RANGE_SIZE = (VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020 - VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY + 1),
VK_SAMPLER_YCBCR_MODEL_CONVERSION_MAX_ENUM = 0x7FFFFFFF
} VkSamplerYcbcrModelConversion;
@@ -4076,9 +4073,6 @@ typedef enum VkSamplerYcbcrRange {
VK_SAMPLER_YCBCR_RANGE_ITU_NARROW = 1,
VK_SAMPLER_YCBCR_RANGE_ITU_FULL_KHR = VK_SAMPLER_YCBCR_RANGE_ITU_FULL,
VK_SAMPLER_YCBCR_RANGE_ITU_NARROW_KHR = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW,
- VK_SAMPLER_YCBCR_RANGE_BEGIN_RANGE = VK_SAMPLER_YCBCR_RANGE_ITU_FULL,
- VK_SAMPLER_YCBCR_RANGE_END_RANGE = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW,
- VK_SAMPLER_YCBCR_RANGE_RANGE_SIZE = (VK_SAMPLER_YCBCR_RANGE_ITU_NARROW - VK_SAMPLER_YCBCR_RANGE_ITU_FULL + 1),
VK_SAMPLER_YCBCR_RANGE_MAX_ENUM = 0x7FFFFFFF
} VkSamplerYcbcrRange;
@@ -4087,9 +4081,6 @@ typedef enum VkChromaLocation {
VK_CHROMA_LOCATION_MIDPOINT = 1,
VK_CHROMA_LOCATION_COSITED_EVEN_KHR = VK_CHROMA_LOCATION_COSITED_EVEN,
VK_CHROMA_LOCATION_MIDPOINT_KHR = VK_CHROMA_LOCATION_MIDPOINT,
- VK_CHROMA_LOCATION_BEGIN_RANGE = VK_CHROMA_LOCATION_COSITED_EVEN,
- VK_CHROMA_LOCATION_END_RANGE = VK_CHROMA_LOCATION_MIDPOINT,
- VK_CHROMA_LOCATION_RANGE_SIZE = (VK_CHROMA_LOCATION_MIDPOINT - VK_CHROMA_LOCATION_COSITED_EVEN + 1),
VK_CHROMA_LOCATION_MAX_ENUM = 0x7FFFFFFF
} VkChromaLocation;
@@ -4097,9 +4088,6 @@ typedef enum VkDescriptorUpdateTemplateType {
VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET = 0,
VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR = 1,
VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET,
- VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_BEGIN_RANGE = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET,
- VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_END_RANGE = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET,
- VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_RANGE_SIZE = (VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET - VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET + 1),
VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_MAX_ENUM = 0x7FFFFFFF
} VkDescriptorUpdateTemplateType;
@@ -4219,6 +4207,7 @@ typedef enum VkExternalSemaphoreHandleTypeFlagBits {
VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT = 0x00000004,
VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT = 0x00000008,
VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT = 0x00000010,
+ VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D11_FENCE_BIT = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT,
VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT,
VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT,
VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
@@ -4378,8 +4367,6 @@ typedef struct VkMemoryRequirements2 {
VkMemoryRequirements memoryRequirements;
} VkMemoryRequirements2;
-typedef VkMemoryRequirements2 VkMemoryRequirements2KHR;
-
typedef struct VkSparseImageMemoryRequirements2 {
VkStructureType sType;
void* pNext;
@@ -4907,7 +4894,6 @@ VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutSupport(
// Vulkan 1.2 version number
#define VK_API_VERSION_1_2 VK_MAKE_VERSION(1, 2, 0)// Patch version should always be set to 0
-typedef uint64_t VkDeviceAddress;
#define VK_MAX_DRIVER_NAME_SIZE 256
#define VK_MAX_DRIVER_INFO_SIZE 256
@@ -4924,6 +4910,8 @@ typedef enum VkDriverId {
VK_DRIVER_ID_GOOGLE_SWIFTSHADER = 10,
VK_DRIVER_ID_GGP_PROPRIETARY = 11,
VK_DRIVER_ID_BROADCOM_PROPRIETARY = 12,
+ VK_DRIVER_ID_MESA_LLVMPIPE = 13,
+ VK_DRIVER_ID_MOLTENVK = 14,
VK_DRIVER_ID_AMD_PROPRIETARY_KHR = VK_DRIVER_ID_AMD_PROPRIETARY,
VK_DRIVER_ID_AMD_OPEN_SOURCE_KHR = VK_DRIVER_ID_AMD_OPEN_SOURCE,
VK_DRIVER_ID_MESA_RADV_KHR = VK_DRIVER_ID_MESA_RADV,
@@ -4936,9 +4924,6 @@ typedef enum VkDriverId {
VK_DRIVER_ID_GOOGLE_SWIFTSHADER_KHR = VK_DRIVER_ID_GOOGLE_SWIFTSHADER,
VK_DRIVER_ID_GGP_PROPRIETARY_KHR = VK_DRIVER_ID_GGP_PROPRIETARY,
VK_DRIVER_ID_BROADCOM_PROPRIETARY_KHR = VK_DRIVER_ID_BROADCOM_PROPRIETARY,
- VK_DRIVER_ID_BEGIN_RANGE = VK_DRIVER_ID_AMD_PROPRIETARY,
- VK_DRIVER_ID_END_RANGE = VK_DRIVER_ID_BROADCOM_PROPRIETARY,
- VK_DRIVER_ID_RANGE_SIZE = (VK_DRIVER_ID_BROADCOM_PROPRIETARY - VK_DRIVER_ID_AMD_PROPRIETARY + 1),
VK_DRIVER_ID_MAX_ENUM = 0x7FFFFFFF
} VkDriverId;
@@ -4949,9 +4934,6 @@ typedef enum VkShaderFloatControlsIndependence {
VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY_KHR = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY,
VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL_KHR = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL,
VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE_KHR = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE,
- VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_BEGIN_RANGE = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY,
- VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_END_RANGE = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE,
- VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_RANGE_SIZE = (VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE - VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY + 1),
VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_MAX_ENUM = 0x7FFFFFFF
} VkShaderFloatControlsIndependence;
@@ -4962,9 +4944,6 @@ typedef enum VkSamplerReductionMode {
VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE,
VK_SAMPLER_REDUCTION_MODE_MIN_EXT = VK_SAMPLER_REDUCTION_MODE_MIN,
VK_SAMPLER_REDUCTION_MODE_MAX_EXT = VK_SAMPLER_REDUCTION_MODE_MAX,
- VK_SAMPLER_REDUCTION_MODE_BEGIN_RANGE = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE,
- VK_SAMPLER_REDUCTION_MODE_END_RANGE = VK_SAMPLER_REDUCTION_MODE_MAX,
- VK_SAMPLER_REDUCTION_MODE_RANGE_SIZE = (VK_SAMPLER_REDUCTION_MODE_MAX - VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE + 1),
VK_SAMPLER_REDUCTION_MODE_MAX_ENUM = 0x7FFFFFFF
} VkSamplerReductionMode;
@@ -4973,9 +4952,6 @@ typedef enum VkSemaphoreType {
VK_SEMAPHORE_TYPE_TIMELINE = 1,
VK_SEMAPHORE_TYPE_BINARY_KHR = VK_SEMAPHORE_TYPE_BINARY,
VK_SEMAPHORE_TYPE_TIMELINE_KHR = VK_SEMAPHORE_TYPE_TIMELINE,
- VK_SEMAPHORE_TYPE_BEGIN_RANGE = VK_SEMAPHORE_TYPE_BINARY,
- VK_SEMAPHORE_TYPE_END_RANGE = VK_SEMAPHORE_TYPE_TIMELINE,
- VK_SEMAPHORE_TYPE_RANGE_SIZE = (VK_SEMAPHORE_TYPE_TIMELINE - VK_SEMAPHORE_TYPE_BINARY + 1),
VK_SEMAPHORE_TYPE_MAX_ENUM = 0x7FFFFFFF
} VkSemaphoreType;
@@ -5662,6 +5638,16 @@ VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSurfaceKHR)
#define VK_KHR_SURFACE_SPEC_VERSION 25
#define VK_KHR_SURFACE_EXTENSION_NAME "VK_KHR_surface"
+typedef enum VkPresentModeKHR {
+ VK_PRESENT_MODE_IMMEDIATE_KHR = 0,
+ VK_PRESENT_MODE_MAILBOX_KHR = 1,
+ VK_PRESENT_MODE_FIFO_KHR = 2,
+ VK_PRESENT_MODE_FIFO_RELAXED_KHR = 3,
+ VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR = 1000111000,
+ VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR = 1000111001,
+ VK_PRESENT_MODE_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkPresentModeKHR;
+
typedef enum VkColorSpaceKHR {
VK_COLOR_SPACE_SRGB_NONLINEAR_KHR = 0,
VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT = 1000104001,
@@ -5681,25 +5667,9 @@ typedef enum VkColorSpaceKHR {
VK_COLOR_SPACE_DISPLAY_NATIVE_AMD = 1000213000,
VK_COLORSPACE_SRGB_NONLINEAR_KHR = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR,
VK_COLOR_SPACE_DCI_P3_LINEAR_EXT = VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT,
- VK_COLOR_SPACE_BEGIN_RANGE_KHR = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR,
- VK_COLOR_SPACE_END_RANGE_KHR = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR,
- VK_COLOR_SPACE_RANGE_SIZE_KHR = (VK_COLOR_SPACE_SRGB_NONLINEAR_KHR - VK_COLOR_SPACE_SRGB_NONLINEAR_KHR + 1),
VK_COLOR_SPACE_MAX_ENUM_KHR = 0x7FFFFFFF
} VkColorSpaceKHR;
-typedef enum VkPresentModeKHR {
- VK_PRESENT_MODE_IMMEDIATE_KHR = 0,
- VK_PRESENT_MODE_MAILBOX_KHR = 1,
- VK_PRESENT_MODE_FIFO_KHR = 2,
- VK_PRESENT_MODE_FIFO_RELAXED_KHR = 3,
- VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR = 1000111000,
- VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR = 1000111001,
- VK_PRESENT_MODE_BEGIN_RANGE_KHR = VK_PRESENT_MODE_IMMEDIATE_KHR,
- VK_PRESENT_MODE_END_RANGE_KHR = VK_PRESENT_MODE_FIFO_RELAXED_KHR,
- VK_PRESENT_MODE_RANGE_SIZE_KHR = (VK_PRESENT_MODE_FIFO_RELAXED_KHR - VK_PRESENT_MODE_IMMEDIATE_KHR + 1),
- VK_PRESENT_MODE_MAX_ENUM_KHR = 0x7FFFFFFF
-} VkPresentModeKHR;
-
typedef enum VkSurfaceTransformFlagBitsKHR {
VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR = 0x00000001,
VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR = 0x00000002,
@@ -5712,7 +5682,6 @@ typedef enum VkSurfaceTransformFlagBitsKHR {
VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR = 0x00000100,
VK_SURFACE_TRANSFORM_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
} VkSurfaceTransformFlagBitsKHR;
-typedef VkFlags VkSurfaceTransformFlagsKHR;
typedef enum VkCompositeAlphaFlagBitsKHR {
VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR = 0x00000001,
@@ -5722,6 +5691,7 @@ typedef enum VkCompositeAlphaFlagBitsKHR {
VK_COMPOSITE_ALPHA_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
} VkCompositeAlphaFlagBitsKHR;
typedef VkFlags VkCompositeAlphaFlagsKHR;
+typedef VkFlags VkSurfaceTransformFlagsKHR;
typedef struct VkSurfaceCapabilitiesKHR {
uint32_t minImageCount;
uint32_t maxImageCount;
@@ -5941,6 +5911,7 @@ VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDisplayKHR)
VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDisplayModeKHR)
#define VK_KHR_DISPLAY_SPEC_VERSION 23
#define VK_KHR_DISPLAY_EXTENSION_NAME "VK_KHR_display"
+typedef VkFlags VkDisplayModeCreateFlagsKHR;
typedef enum VkDisplayPlaneAlphaFlagBitsKHR {
VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR = 0x00000001,
@@ -5950,28 +5921,12 @@ typedef enum VkDisplayPlaneAlphaFlagBitsKHR {
VK_DISPLAY_PLANE_ALPHA_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
} VkDisplayPlaneAlphaFlagBitsKHR;
typedef VkFlags VkDisplayPlaneAlphaFlagsKHR;
-typedef VkFlags VkDisplayModeCreateFlagsKHR;
typedef VkFlags VkDisplaySurfaceCreateFlagsKHR;
-typedef struct VkDisplayPropertiesKHR {
- VkDisplayKHR display;
- const char* displayName;
- VkExtent2D physicalDimensions;
- VkExtent2D physicalResolution;
- VkSurfaceTransformFlagsKHR supportedTransforms;
- VkBool32 planeReorderPossible;
- VkBool32 persistentContent;
-} VkDisplayPropertiesKHR;
-
typedef struct VkDisplayModeParametersKHR {
VkExtent2D visibleRegion;
uint32_t refreshRate;
} VkDisplayModeParametersKHR;
-typedef struct VkDisplayModePropertiesKHR {
- VkDisplayModeKHR displayMode;
- VkDisplayModeParametersKHR parameters;
-} VkDisplayModePropertiesKHR;
-
typedef struct VkDisplayModeCreateInfoKHR {
VkStructureType sType;
const void* pNext;
@@ -5979,6 +5934,11 @@ typedef struct VkDisplayModeCreateInfoKHR {
VkDisplayModeParametersKHR parameters;
} VkDisplayModeCreateInfoKHR;
+typedef struct VkDisplayModePropertiesKHR {
+ VkDisplayModeKHR displayMode;
+ VkDisplayModeParametersKHR parameters;
+} VkDisplayModePropertiesKHR;
+
typedef struct VkDisplayPlaneCapabilitiesKHR {
VkDisplayPlaneAlphaFlagsKHR supportedAlpha;
VkOffset2D minSrcPosition;
@@ -5996,6 +5956,16 @@ typedef struct VkDisplayPlanePropertiesKHR {
uint32_t currentStackIndex;
} VkDisplayPlanePropertiesKHR;
+typedef struct VkDisplayPropertiesKHR {
+ VkDisplayKHR display;
+ const char* displayName;
+ VkExtent2D physicalDimensions;
+ VkExtent2D physicalResolution;
+ VkSurfaceTransformFlagsKHR supportedTransforms;
+ VkBool32 planeReorderPossible;
+ VkBool32 persistentContent;
+} VkDisplayPropertiesKHR;
+
typedef struct VkDisplaySurfaceCreateInfoKHR {
VkStructureType sType;
const void* pNext;
@@ -6679,9 +6649,6 @@ typedef enum VkPerformanceCounterUnitKHR {
VK_PERFORMANCE_COUNTER_UNIT_AMPS_KHR = 8,
VK_PERFORMANCE_COUNTER_UNIT_HERTZ_KHR = 9,
VK_PERFORMANCE_COUNTER_UNIT_CYCLES_KHR = 10,
- VK_PERFORMANCE_COUNTER_UNIT_BEGIN_RANGE_KHR = VK_PERFORMANCE_COUNTER_UNIT_GENERIC_KHR,
- VK_PERFORMANCE_COUNTER_UNIT_END_RANGE_KHR = VK_PERFORMANCE_COUNTER_UNIT_CYCLES_KHR,
- VK_PERFORMANCE_COUNTER_UNIT_RANGE_SIZE_KHR = (VK_PERFORMANCE_COUNTER_UNIT_CYCLES_KHR - VK_PERFORMANCE_COUNTER_UNIT_GENERIC_KHR + 1),
VK_PERFORMANCE_COUNTER_UNIT_MAX_ENUM_KHR = 0x7FFFFFFF
} VkPerformanceCounterUnitKHR;
@@ -6692,9 +6659,6 @@ typedef enum VkPerformanceCounterScopeKHR {
VK_QUERY_SCOPE_COMMAND_BUFFER_KHR = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR,
VK_QUERY_SCOPE_RENDER_PASS_KHR = VK_PERFORMANCE_COUNTER_SCOPE_RENDER_PASS_KHR,
VK_QUERY_SCOPE_COMMAND_KHR = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR,
- VK_PERFORMANCE_COUNTER_SCOPE_BEGIN_RANGE_KHR = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR,
- VK_PERFORMANCE_COUNTER_SCOPE_END_RANGE_KHR = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR,
- VK_PERFORMANCE_COUNTER_SCOPE_RANGE_SIZE_KHR = (VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR - VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR + 1),
VK_PERFORMANCE_COUNTER_SCOPE_MAX_ENUM_KHR = 0x7FFFFFFF
} VkPerformanceCounterScopeKHR;
@@ -6705,9 +6669,6 @@ typedef enum VkPerformanceCounterStorageKHR {
VK_PERFORMANCE_COUNTER_STORAGE_UINT64_KHR = 3,
VK_PERFORMANCE_COUNTER_STORAGE_FLOAT32_KHR = 4,
VK_PERFORMANCE_COUNTER_STORAGE_FLOAT64_KHR = 5,
- VK_PERFORMANCE_COUNTER_STORAGE_BEGIN_RANGE_KHR = VK_PERFORMANCE_COUNTER_STORAGE_INT32_KHR,
- VK_PERFORMANCE_COUNTER_STORAGE_END_RANGE_KHR = VK_PERFORMANCE_COUNTER_STORAGE_FLOAT64_KHR,
- VK_PERFORMANCE_COUNTER_STORAGE_RANGE_SIZE_KHR = (VK_PERFORMANCE_COUNTER_STORAGE_FLOAT64_KHR - VK_PERFORMANCE_COUNTER_STORAGE_INT32_KHR + 1),
VK_PERFORMANCE_COUNTER_STORAGE_MAX_ENUM_KHR = 0x7FFFFFFF
} VkPerformanceCounterStorageKHR;
@@ -6967,6 +6928,8 @@ typedef VkImageMemoryRequirementsInfo2 VkImageMemoryRequirementsInfo2KHR;
typedef VkImageSparseMemoryRequirementsInfo2 VkImageSparseMemoryRequirementsInfo2KHR;
+typedef VkMemoryRequirements2 VkMemoryRequirements2KHR;
+
typedef VkSparseImageMemoryRequirements2 VkSparseImageMemoryRequirements2KHR;
typedef void (VKAPI_PTR *PFN_vkGetImageMemoryRequirements2KHR)(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements);
@@ -7303,9 +7266,6 @@ typedef enum VkPipelineExecutableStatisticFormatKHR {
VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_INT64_KHR = 1,
VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_UINT64_KHR = 2,
VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR = 3,
- VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BEGIN_RANGE_KHR = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR,
- VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_END_RANGE_KHR = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR,
- VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_RANGE_SIZE_KHR = (VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR - VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR + 1),
VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_MAX_ENUM_KHR = 0x7FFFFFFF
} VkPipelineExecutableStatisticFormatKHR;
typedef struct VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR {
@@ -7387,6 +7347,159 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineExecutableInternalRepresentationsKHR
#endif
+#define VK_KHR_shader_non_semantic_info 1
+#define VK_KHR_SHADER_NON_SEMANTIC_INFO_SPEC_VERSION 1
+#define VK_KHR_SHADER_NON_SEMANTIC_INFO_EXTENSION_NAME "VK_KHR_shader_non_semantic_info"
+
+
+#define VK_KHR_copy_commands2 1
+#define VK_KHR_COPY_COMMANDS_2_SPEC_VERSION 1
+#define VK_KHR_COPY_COMMANDS_2_EXTENSION_NAME "VK_KHR_copy_commands2"
+typedef struct VkBufferCopy2KHR {
+ VkStructureType sType;
+ const void* pNext;
+ VkDeviceSize srcOffset;
+ VkDeviceSize dstOffset;
+ VkDeviceSize size;
+} VkBufferCopy2KHR;
+
+typedef struct VkCopyBufferInfo2KHR {
+ VkStructureType sType;
+ const void* pNext;
+ VkBuffer srcBuffer;
+ VkBuffer dstBuffer;
+ uint32_t regionCount;
+ const VkBufferCopy2KHR* pRegions;
+} VkCopyBufferInfo2KHR;
+
+typedef struct VkImageCopy2KHR {
+ VkStructureType sType;
+ const void* pNext;
+ VkImageSubresourceLayers srcSubresource;
+ VkOffset3D srcOffset;
+ VkImageSubresourceLayers dstSubresource;
+ VkOffset3D dstOffset;
+ VkExtent3D extent;
+} VkImageCopy2KHR;
+
+typedef struct VkCopyImageInfo2KHR {
+ VkStructureType sType;
+ const void* pNext;
+ VkImage srcImage;
+ VkImageLayout srcImageLayout;
+ VkImage dstImage;
+ VkImageLayout dstImageLayout;
+ uint32_t regionCount;
+ const VkImageCopy2KHR* pRegions;
+} VkCopyImageInfo2KHR;
+
+typedef struct VkBufferImageCopy2KHR {
+ VkStructureType sType;
+ const void* pNext;
+ VkDeviceSize bufferOffset;
+ uint32_t bufferRowLength;
+ uint32_t bufferImageHeight;
+ VkImageSubresourceLayers imageSubresource;
+ VkOffset3D imageOffset;
+ VkExtent3D imageExtent;
+} VkBufferImageCopy2KHR;
+
+typedef struct VkCopyBufferToImageInfo2KHR {
+ VkStructureType sType;
+ const void* pNext;
+ VkBuffer srcBuffer;
+ VkImage dstImage;
+ VkImageLayout dstImageLayout;
+ uint32_t regionCount;
+ const VkBufferImageCopy2KHR* pRegions;
+} VkCopyBufferToImageInfo2KHR;
+
+typedef struct VkCopyImageToBufferInfo2KHR {
+ VkStructureType sType;
+ const void* pNext;
+ VkImage srcImage;
+ VkImageLayout srcImageLayout;
+ VkBuffer dstBuffer;
+ uint32_t regionCount;
+ const VkBufferImageCopy2KHR* pRegions;
+} VkCopyImageToBufferInfo2KHR;
+
+typedef struct VkImageBlit2KHR {
+ VkStructureType sType;
+ const void* pNext;
+ VkImageSubresourceLayers srcSubresource;
+ VkOffset3D srcOffsets[2];
+ VkImageSubresourceLayers dstSubresource;
+ VkOffset3D dstOffsets[2];
+} VkImageBlit2KHR;
+
+typedef struct VkBlitImageInfo2KHR {
+ VkStructureType sType;
+ const void* pNext;
+ VkImage srcImage;
+ VkImageLayout srcImageLayout;
+ VkImage dstImage;
+ VkImageLayout dstImageLayout;
+ uint32_t regionCount;
+ const VkImageBlit2KHR* pRegions;
+ VkFilter filter;
+} VkBlitImageInfo2KHR;
+
+typedef struct VkImageResolve2KHR {
+ VkStructureType sType;
+ const void* pNext;
+ VkImageSubresourceLayers srcSubresource;
+ VkOffset3D srcOffset;
+ VkImageSubresourceLayers dstSubresource;
+ VkOffset3D dstOffset;
+ VkExtent3D extent;
+} VkImageResolve2KHR;
+
+typedef struct VkResolveImageInfo2KHR {
+ VkStructureType sType;
+ const void* pNext;
+ VkImage srcImage;
+ VkImageLayout srcImageLayout;
+ VkImage dstImage;
+ VkImageLayout dstImageLayout;
+ uint32_t regionCount;
+ const VkImageResolve2KHR* pRegions;
+} VkResolveImageInfo2KHR;
+
+typedef void (VKAPI_PTR *PFN_vkCmdCopyBuffer2KHR)(VkCommandBuffer commandBuffer, const VkCopyBufferInfo2KHR* pCopyBufferInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyImage2KHR)(VkCommandBuffer commandBuffer, const VkCopyImageInfo2KHR* pCopyImageInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyBufferToImage2KHR)(VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2KHR* pCopyBufferToImageInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyImageToBuffer2KHR)(VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2KHR* pCopyImageToBufferInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdBlitImage2KHR)(VkCommandBuffer commandBuffer, const VkBlitImageInfo2KHR* pBlitImageInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdResolveImage2KHR)(VkCommandBuffer commandBuffer, const VkResolveImageInfo2KHR* pResolveImageInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer2KHR(
+ VkCommandBuffer commandBuffer,
+ const VkCopyBufferInfo2KHR* pCopyBufferInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyImage2KHR(
+ VkCommandBuffer commandBuffer,
+ const VkCopyImageInfo2KHR* pCopyImageInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyBufferToImage2KHR(
+ VkCommandBuffer commandBuffer,
+ const VkCopyBufferToImageInfo2KHR* pCopyBufferToImageInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyImageToBuffer2KHR(
+ VkCommandBuffer commandBuffer,
+ const VkCopyImageToBufferInfo2KHR* pCopyImageToBufferInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBlitImage2KHR(
+ VkCommandBuffer commandBuffer,
+ const VkBlitImageInfo2KHR* pBlitImageInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage2KHR(
+ VkCommandBuffer commandBuffer,
+ const VkResolveImageInfo2KHR* pResolveImageInfo);
+#endif
+
+
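The VK_KHR_copy_commands2 entry points mirror the original copy/blit/resolve commands but move every parameter into extensible sType/pNext structures. A minimal sketch of recording a buffer copy through the new path, assuming the VK_STRUCTURE_TYPE_BUFFER_COPY_2_KHR and VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2_KHR constants that accompany these structs (the command buffer, buffers and size are placeholders):

    /* Sketch only: one-region buffer copy via VK_KHR_copy_commands2. */
    static void record_buffer_copy2(VkCommandBuffer cmd, VkBuffer src, VkBuffer dst,
                                    VkDeviceSize size)
    {
        VkBufferCopy2KHR region = {
            .sType     = VK_STRUCTURE_TYPE_BUFFER_COPY_2_KHR,        /* assumed constant */
            .pNext     = NULL,
            .srcOffset = 0,
            .dstOffset = 0,
            .size      = size,
        };
        VkCopyBufferInfo2KHR info = {
            .sType       = VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2_KHR, /* assumed constant */
            .pNext       = NULL,
            .srcBuffer   = src,
            .dstBuffer   = dst,
            .regionCount = 1,
            .pRegions    = &region,
        };
        vkCmdCopyBuffer2KHR(cmd, &info); /* prototype declared above when VK_NO_PROTOTYPES is unset */
    }
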
#define VK_EXT_debug_report 1
VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDebugReportCallbackEXT)
#define VK_EXT_DEBUG_REPORT_SPEC_VERSION 9
@@ -7424,19 +7537,15 @@ typedef enum VkDebugReportObjectTypeEXT {
VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT = 28,
VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT = 29,
VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT = 30,
- VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT = 31,
- VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT = 32,
VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT = 33,
VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT = 1000156000,
VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT = 1000085000,
- VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT = 1000165000,
+ VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR_EXT = 1000165000,
VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT,
VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT,
VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT,
VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT,
- VK_DEBUG_REPORT_OBJECT_TYPE_BEGIN_RANGE_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
- VK_DEBUG_REPORT_OBJECT_TYPE_END_RANGE_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT,
- VK_DEBUG_REPORT_OBJECT_TYPE_RANGE_SIZE_EXT = (VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT - VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT + 1),
+ VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR_EXT,
VK_DEBUG_REPORT_OBJECT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF
} VkDebugReportObjectTypeEXT;
@@ -7517,9 +7626,6 @@ VKAPI_ATTR void VKAPI_CALL vkDebugReportMessageEXT(
typedef enum VkRasterizationOrderAMD {
VK_RASTERIZATION_ORDER_STRICT_AMD = 0,
VK_RASTERIZATION_ORDER_RELAXED_AMD = 1,
- VK_RASTERIZATION_ORDER_BEGIN_RANGE_AMD = VK_RASTERIZATION_ORDER_STRICT_AMD,
- VK_RASTERIZATION_ORDER_END_RANGE_AMD = VK_RASTERIZATION_ORDER_RELAXED_AMD,
- VK_RASTERIZATION_ORDER_RANGE_SIZE_AMD = (VK_RASTERIZATION_ORDER_RELAXED_AMD - VK_RASTERIZATION_ORDER_STRICT_AMD + 1),
VK_RASTERIZATION_ORDER_MAX_ENUM_AMD = 0x7FFFFFFF
} VkRasterizationOrderAMD;
typedef struct VkPipelineRasterizationStateRasterizationOrderAMD {
@@ -7713,7 +7819,7 @@ VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectByteCountEXT(
#define VK_NVX_image_view_handle 1
-#define VK_NVX_IMAGE_VIEW_HANDLE_SPEC_VERSION 1
+#define VK_NVX_IMAGE_VIEW_HANDLE_SPEC_VERSION 2
#define VK_NVX_IMAGE_VIEW_HANDLE_EXTENSION_NAME "VK_NVX_image_view_handle"
typedef struct VkImageViewHandleInfoNVX {
VkStructureType sType;
@@ -7723,12 +7829,25 @@ typedef struct VkImageViewHandleInfoNVX {
VkSampler sampler;
} VkImageViewHandleInfoNVX;
+typedef struct VkImageViewAddressPropertiesNVX {
+ VkStructureType sType;
+ void* pNext;
+ VkDeviceAddress deviceAddress;
+ VkDeviceSize size;
+} VkImageViewAddressPropertiesNVX;
+
typedef uint32_t (VKAPI_PTR *PFN_vkGetImageViewHandleNVX)(VkDevice device, const VkImageViewHandleInfoNVX* pInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkGetImageViewAddressNVX)(VkDevice device, VkImageView imageView, VkImageViewAddressPropertiesNVX* pProperties);
#ifndef VK_NO_PROTOTYPES
VKAPI_ATTR uint32_t VKAPI_CALL vkGetImageViewHandleNVX(
VkDevice device,
const VkImageViewHandleInfoNVX* pInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetImageViewAddressNVX(
+ VkDevice device,
+ VkImageView imageView,
+ VkImageViewAddressPropertiesNVX* pProperties);
#endif
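Revision 2 of VK_NVX_image_view_handle pairs the existing handle query with a new device-address query. A minimal sketch of retrieving the address and size backing an image view, assuming the VK_STRUCTURE_TYPE_IMAGE_VIEW_ADDRESS_PROPERTIES_NVX constant that would accompany the new struct (device and imageView are placeholders):

    /* Sketch only: query the device address and size behind an image view. */
    static VkResult query_image_view_address(VkDevice device, VkImageView imageView,
                                             VkDeviceAddress *outAddress, VkDeviceSize *outSize)
    {
        VkImageViewAddressPropertiesNVX props = {
            .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_ADDRESS_PROPERTIES_NVX, /* assumed constant */
            .pNext = NULL,
        };
        VkResult result = vkGetImageViewAddressNVX(device, imageView, &props);
        if (result == VK_SUCCESS) {
            *outAddress = props.deviceAddress;
            *outSize    = props.size;
        }
        return result;
    }
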
@@ -7793,9 +7912,6 @@ typedef enum VkShaderInfoTypeAMD {
VK_SHADER_INFO_TYPE_STATISTICS_AMD = 0,
VK_SHADER_INFO_TYPE_BINARY_AMD = 1,
VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD = 2,
- VK_SHADER_INFO_TYPE_BEGIN_RANGE_AMD = VK_SHADER_INFO_TYPE_STATISTICS_AMD,
- VK_SHADER_INFO_TYPE_END_RANGE_AMD = VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD,
- VK_SHADER_INFO_TYPE_RANGE_SIZE_AMD = (VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD - VK_SHADER_INFO_TYPE_STATISTICS_AMD + 1),
VK_SHADER_INFO_TYPE_MAX_ENUM_AMD = 0x7FFFFFFF
} VkShaderInfoTypeAMD;
typedef struct VkShaderResourceUsageAMD {
@@ -7916,9 +8032,6 @@ typedef struct VkExportMemoryAllocateInfoNV {
typedef enum VkValidationCheckEXT {
VK_VALIDATION_CHECK_ALL_EXT = 0,
VK_VALIDATION_CHECK_SHADERS_EXT = 1,
- VK_VALIDATION_CHECK_BEGIN_RANGE_EXT = VK_VALIDATION_CHECK_ALL_EXT,
- VK_VALIDATION_CHECK_END_RANGE_EXT = VK_VALIDATION_CHECK_SHADERS_EXT,
- VK_VALIDATION_CHECK_RANGE_SIZE_EXT = (VK_VALIDATION_CHECK_SHADERS_EXT - VK_VALIDATION_CHECK_ALL_EXT + 1),
VK_VALIDATION_CHECK_MAX_ENUM_EXT = 0x7FFFFFFF
} VkValidationCheckEXT;
typedef struct VkValidationFlagsEXT {
@@ -8011,229 +8124,6 @@ VKAPI_ATTR void VKAPI_CALL vkCmdEndConditionalRenderingEXT(
#endif
-#define VK_NVX_device_generated_commands 1
-VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkObjectTableNVX)
-VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkIndirectCommandsLayoutNVX)
-#define VK_NVX_DEVICE_GENERATED_COMMANDS_SPEC_VERSION 3
-#define VK_NVX_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME "VK_NVX_device_generated_commands"
-
-typedef enum VkIndirectCommandsTokenTypeNVX {
- VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX = 0,
- VK_INDIRECT_COMMANDS_TOKEN_TYPE_DESCRIPTOR_SET_NVX = 1,
- VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NVX = 2,
- VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NVX = 3,
- VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NVX = 4,
- VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NVX = 5,
- VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NVX = 6,
- VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX = 7,
- VK_INDIRECT_COMMANDS_TOKEN_TYPE_BEGIN_RANGE_NVX = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX,
- VK_INDIRECT_COMMANDS_TOKEN_TYPE_END_RANGE_NVX = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX,
- VK_INDIRECT_COMMANDS_TOKEN_TYPE_RANGE_SIZE_NVX = (VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX - VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX + 1),
- VK_INDIRECT_COMMANDS_TOKEN_TYPE_MAX_ENUM_NVX = 0x7FFFFFFF
-} VkIndirectCommandsTokenTypeNVX;
-
-typedef enum VkObjectEntryTypeNVX {
- VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX = 0,
- VK_OBJECT_ENTRY_TYPE_PIPELINE_NVX = 1,
- VK_OBJECT_ENTRY_TYPE_INDEX_BUFFER_NVX = 2,
- VK_OBJECT_ENTRY_TYPE_VERTEX_BUFFER_NVX = 3,
- VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX = 4,
- VK_OBJECT_ENTRY_TYPE_BEGIN_RANGE_NVX = VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX,
- VK_OBJECT_ENTRY_TYPE_END_RANGE_NVX = VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX,
- VK_OBJECT_ENTRY_TYPE_RANGE_SIZE_NVX = (VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX - VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX + 1),
- VK_OBJECT_ENTRY_TYPE_MAX_ENUM_NVX = 0x7FFFFFFF
-} VkObjectEntryTypeNVX;
-
-typedef enum VkIndirectCommandsLayoutUsageFlagBitsNVX {
- VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NVX = 0x00000001,
- VK_INDIRECT_COMMANDS_LAYOUT_USAGE_SPARSE_SEQUENCES_BIT_NVX = 0x00000002,
- VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EMPTY_EXECUTIONS_BIT_NVX = 0x00000004,
- VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NVX = 0x00000008,
- VK_INDIRECT_COMMANDS_LAYOUT_USAGE_FLAG_BITS_MAX_ENUM_NVX = 0x7FFFFFFF
-} VkIndirectCommandsLayoutUsageFlagBitsNVX;
-typedef VkFlags VkIndirectCommandsLayoutUsageFlagsNVX;
-
-typedef enum VkObjectEntryUsageFlagBitsNVX {
- VK_OBJECT_ENTRY_USAGE_GRAPHICS_BIT_NVX = 0x00000001,
- VK_OBJECT_ENTRY_USAGE_COMPUTE_BIT_NVX = 0x00000002,
- VK_OBJECT_ENTRY_USAGE_FLAG_BITS_MAX_ENUM_NVX = 0x7FFFFFFF
-} VkObjectEntryUsageFlagBitsNVX;
-typedef VkFlags VkObjectEntryUsageFlagsNVX;
-typedef struct VkDeviceGeneratedCommandsFeaturesNVX {
- VkStructureType sType;
- const void* pNext;
- VkBool32 computeBindingPointSupport;
-} VkDeviceGeneratedCommandsFeaturesNVX;
-
-typedef struct VkDeviceGeneratedCommandsLimitsNVX {
- VkStructureType sType;
- const void* pNext;
- uint32_t maxIndirectCommandsLayoutTokenCount;
- uint32_t maxObjectEntryCounts;
- uint32_t minSequenceCountBufferOffsetAlignment;
- uint32_t minSequenceIndexBufferOffsetAlignment;
- uint32_t minCommandsTokenBufferOffsetAlignment;
-} VkDeviceGeneratedCommandsLimitsNVX;
-
-typedef struct VkIndirectCommandsTokenNVX {
- VkIndirectCommandsTokenTypeNVX tokenType;
- VkBuffer buffer;
- VkDeviceSize offset;
-} VkIndirectCommandsTokenNVX;
-
-typedef struct VkIndirectCommandsLayoutTokenNVX {
- VkIndirectCommandsTokenTypeNVX tokenType;
- uint32_t bindingUnit;
- uint32_t dynamicCount;
- uint32_t divisor;
-} VkIndirectCommandsLayoutTokenNVX;
-
-typedef struct VkIndirectCommandsLayoutCreateInfoNVX {
- VkStructureType sType;
- const void* pNext;
- VkPipelineBindPoint pipelineBindPoint;
- VkIndirectCommandsLayoutUsageFlagsNVX flags;
- uint32_t tokenCount;
- const VkIndirectCommandsLayoutTokenNVX* pTokens;
-} VkIndirectCommandsLayoutCreateInfoNVX;
-
-typedef struct VkCmdProcessCommandsInfoNVX {
- VkStructureType sType;
- const void* pNext;
- VkObjectTableNVX objectTable;
- VkIndirectCommandsLayoutNVX indirectCommandsLayout;
- uint32_t indirectCommandsTokenCount;
- const VkIndirectCommandsTokenNVX* pIndirectCommandsTokens;
- uint32_t maxSequencesCount;
- VkCommandBuffer targetCommandBuffer;
- VkBuffer sequencesCountBuffer;
- VkDeviceSize sequencesCountOffset;
- VkBuffer sequencesIndexBuffer;
- VkDeviceSize sequencesIndexOffset;
-} VkCmdProcessCommandsInfoNVX;
-
-typedef struct VkCmdReserveSpaceForCommandsInfoNVX {
- VkStructureType sType;
- const void* pNext;
- VkObjectTableNVX objectTable;
- VkIndirectCommandsLayoutNVX indirectCommandsLayout;
- uint32_t maxSequencesCount;
-} VkCmdReserveSpaceForCommandsInfoNVX;
-
-typedef struct VkObjectTableCreateInfoNVX {
- VkStructureType sType;
- const void* pNext;
- uint32_t objectCount;
- const VkObjectEntryTypeNVX* pObjectEntryTypes;
- const uint32_t* pObjectEntryCounts;
- const VkObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags;
- uint32_t maxUniformBuffersPerDescriptor;
- uint32_t maxStorageBuffersPerDescriptor;
- uint32_t maxStorageImagesPerDescriptor;
- uint32_t maxSampledImagesPerDescriptor;
- uint32_t maxPipelineLayouts;
-} VkObjectTableCreateInfoNVX;
-
-typedef struct VkObjectTableEntryNVX {
- VkObjectEntryTypeNVX type;
- VkObjectEntryUsageFlagsNVX flags;
-} VkObjectTableEntryNVX;
-
-typedef struct VkObjectTablePipelineEntryNVX {
- VkObjectEntryTypeNVX type;
- VkObjectEntryUsageFlagsNVX flags;
- VkPipeline pipeline;
-} VkObjectTablePipelineEntryNVX;
-
-typedef struct VkObjectTableDescriptorSetEntryNVX {
- VkObjectEntryTypeNVX type;
- VkObjectEntryUsageFlagsNVX flags;
- VkPipelineLayout pipelineLayout;
- VkDescriptorSet descriptorSet;
-} VkObjectTableDescriptorSetEntryNVX;
-
-typedef struct VkObjectTableVertexBufferEntryNVX {
- VkObjectEntryTypeNVX type;
- VkObjectEntryUsageFlagsNVX flags;
- VkBuffer buffer;
-} VkObjectTableVertexBufferEntryNVX;
-
-typedef struct VkObjectTableIndexBufferEntryNVX {
- VkObjectEntryTypeNVX type;
- VkObjectEntryUsageFlagsNVX flags;
- VkBuffer buffer;
- VkIndexType indexType;
-} VkObjectTableIndexBufferEntryNVX;
-
-typedef struct VkObjectTablePushConstantEntryNVX {
- VkObjectEntryTypeNVX type;
- VkObjectEntryUsageFlagsNVX flags;
- VkPipelineLayout pipelineLayout;
- VkShaderStageFlags stageFlags;
-} VkObjectTablePushConstantEntryNVX;
-
-typedef void (VKAPI_PTR *PFN_vkCmdProcessCommandsNVX)(VkCommandBuffer commandBuffer, const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo);
-typedef void (VKAPI_PTR *PFN_vkCmdReserveSpaceForCommandsNVX)(VkCommandBuffer commandBuffer, const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo);
-typedef VkResult (VKAPI_PTR *PFN_vkCreateIndirectCommandsLayoutNVX)(VkDevice device, const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout);
-typedef void (VKAPI_PTR *PFN_vkDestroyIndirectCommandsLayoutNVX)(VkDevice device, VkIndirectCommandsLayoutNVX indirectCommandsLayout, const VkAllocationCallbacks* pAllocator);
-typedef VkResult (VKAPI_PTR *PFN_vkCreateObjectTableNVX)(VkDevice device, const VkObjectTableCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkObjectTableNVX* pObjectTable);
-typedef void (VKAPI_PTR *PFN_vkDestroyObjectTableNVX)(VkDevice device, VkObjectTableNVX objectTable, const VkAllocationCallbacks* pAllocator);
-typedef VkResult (VKAPI_PTR *PFN_vkRegisterObjectsNVX)(VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices);
-typedef VkResult (VKAPI_PTR *PFN_vkUnregisterObjectsNVX)(VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices);
-typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX)(VkPhysicalDevice physicalDevice, VkDeviceGeneratedCommandsFeaturesNVX* pFeatures, VkDeviceGeneratedCommandsLimitsNVX* pLimits);
-
-#ifndef VK_NO_PROTOTYPES
-VKAPI_ATTR void VKAPI_CALL vkCmdProcessCommandsNVX(
- VkCommandBuffer commandBuffer,
- const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo);
-
-VKAPI_ATTR void VKAPI_CALL vkCmdReserveSpaceForCommandsNVX(
- VkCommandBuffer commandBuffer,
- const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo);
-
-VKAPI_ATTR VkResult VKAPI_CALL vkCreateIndirectCommandsLayoutNVX(
- VkDevice device,
- const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout);
-
-VKAPI_ATTR void VKAPI_CALL vkDestroyIndirectCommandsLayoutNVX(
- VkDevice device,
- VkIndirectCommandsLayoutNVX indirectCommandsLayout,
- const VkAllocationCallbacks* pAllocator);
-
-VKAPI_ATTR VkResult VKAPI_CALL vkCreateObjectTableNVX(
- VkDevice device,
- const VkObjectTableCreateInfoNVX* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkObjectTableNVX* pObjectTable);
-
-VKAPI_ATTR void VKAPI_CALL vkDestroyObjectTableNVX(
- VkDevice device,
- VkObjectTableNVX objectTable,
- const VkAllocationCallbacks* pAllocator);
-
-VKAPI_ATTR VkResult VKAPI_CALL vkRegisterObjectsNVX(
- VkDevice device,
- VkObjectTableNVX objectTable,
- uint32_t objectCount,
- const VkObjectTableEntryNVX* const* ppObjectTableEntries,
- const uint32_t* pObjectIndices);
-
-VKAPI_ATTR VkResult VKAPI_CALL vkUnregisterObjectsNVX(
- VkDevice device,
- VkObjectTableNVX objectTable,
- uint32_t objectCount,
- const VkObjectEntryTypeNVX* pObjectEntryTypes,
- const uint32_t* pObjectIndices);
-
-VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX(
- VkPhysicalDevice physicalDevice,
- VkDeviceGeneratedCommandsFeaturesNVX* pFeatures,
- VkDeviceGeneratedCommandsLimitsNVX* pLimits);
-#endif
-
-
#define VK_NV_clip_space_w_scaling 1
#define VK_NV_CLIP_SPACE_W_SCALING_SPEC_VERSION 1
#define VK_NV_CLIP_SPACE_W_SCALING_EXTENSION_NAME "VK_NV_clip_space_w_scaling"
@@ -8316,25 +8206,16 @@ typedef enum VkDisplayPowerStateEXT {
VK_DISPLAY_POWER_STATE_OFF_EXT = 0,
VK_DISPLAY_POWER_STATE_SUSPEND_EXT = 1,
VK_DISPLAY_POWER_STATE_ON_EXT = 2,
- VK_DISPLAY_POWER_STATE_BEGIN_RANGE_EXT = VK_DISPLAY_POWER_STATE_OFF_EXT,
- VK_DISPLAY_POWER_STATE_END_RANGE_EXT = VK_DISPLAY_POWER_STATE_ON_EXT,
- VK_DISPLAY_POWER_STATE_RANGE_SIZE_EXT = (VK_DISPLAY_POWER_STATE_ON_EXT - VK_DISPLAY_POWER_STATE_OFF_EXT + 1),
VK_DISPLAY_POWER_STATE_MAX_ENUM_EXT = 0x7FFFFFFF
} VkDisplayPowerStateEXT;
typedef enum VkDeviceEventTypeEXT {
VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT = 0,
- VK_DEVICE_EVENT_TYPE_BEGIN_RANGE_EXT = VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT,
- VK_DEVICE_EVENT_TYPE_END_RANGE_EXT = VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT,
- VK_DEVICE_EVENT_TYPE_RANGE_SIZE_EXT = (VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT - VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT + 1),
VK_DEVICE_EVENT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF
} VkDeviceEventTypeEXT;
typedef enum VkDisplayEventTypeEXT {
VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT = 0,
- VK_DISPLAY_EVENT_TYPE_BEGIN_RANGE_EXT = VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT,
- VK_DISPLAY_EVENT_TYPE_END_RANGE_EXT = VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT,
- VK_DISPLAY_EVENT_TYPE_RANGE_SIZE_EXT = (VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT - VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT + 1),
VK_DISPLAY_EVENT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF
} VkDisplayEventTypeEXT;
typedef struct VkDisplayPowerInfoEXT {
@@ -8476,9 +8357,6 @@ typedef enum VkViewportCoordinateSwizzleNV {
VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Z_NV = 5,
VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV = 6,
VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV = 7,
- VK_VIEWPORT_COORDINATE_SWIZZLE_BEGIN_RANGE_NV = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV,
- VK_VIEWPORT_COORDINATE_SWIZZLE_END_RANGE_NV = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV,
- VK_VIEWPORT_COORDINATE_SWIZZLE_RANGE_SIZE_NV = (VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV - VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV + 1),
VK_VIEWPORT_COORDINATE_SWIZZLE_MAX_ENUM_NV = 0x7FFFFFFF
} VkViewportCoordinateSwizzleNV;
typedef VkFlags VkPipelineViewportSwizzleStateCreateFlagsNV;
@@ -8506,9 +8384,6 @@ typedef struct VkPipelineViewportSwizzleStateCreateInfoNV {
typedef enum VkDiscardRectangleModeEXT {
VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT = 0,
VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT = 1,
- VK_DISCARD_RECTANGLE_MODE_BEGIN_RANGE_EXT = VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT,
- VK_DISCARD_RECTANGLE_MODE_END_RANGE_EXT = VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT,
- VK_DISCARD_RECTANGLE_MODE_RANGE_SIZE_EXT = (VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT - VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT + 1),
VK_DISCARD_RECTANGLE_MODE_MAX_ENUM_EXT = 0x7FFFFFFF
} VkDiscardRectangleModeEXT;
typedef VkFlags VkPipelineDiscardRectangleStateCreateFlagsEXT;
@@ -8546,9 +8421,6 @@ typedef enum VkConservativeRasterizationModeEXT {
VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT = 0,
VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT = 1,
VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT = 2,
- VK_CONSERVATIVE_RASTERIZATION_MODE_BEGIN_RANGE_EXT = VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT,
- VK_CONSERVATIVE_RASTERIZATION_MODE_END_RANGE_EXT = VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT,
- VK_CONSERVATIVE_RASTERIZATION_MODE_RANGE_SIZE_EXT = (VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT - VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT + 1),
VK_CONSERVATIVE_RASTERIZATION_MODE_MAX_ENUM_EXT = 0x7FFFFFFF
} VkConservativeRasterizationModeEXT;
typedef VkFlags VkPipelineRasterizationConservativeStateCreateFlagsEXT;
@@ -8645,10 +8517,9 @@ VKAPI_ATTR void VKAPI_CALL vkSetHdrMetadataEXT(
#define VK_EXT_debug_utils 1
VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDebugUtilsMessengerEXT)
-#define VK_EXT_DEBUG_UTILS_SPEC_VERSION 1
+#define VK_EXT_DEBUG_UTILS_SPEC_VERSION 2
#define VK_EXT_DEBUG_UTILS_EXTENSION_NAME "VK_EXT_debug_utils"
typedef VkFlags VkDebugUtilsMessengerCallbackDataFlagsEXT;
-typedef VkFlags VkDebugUtilsMessengerCreateFlagsEXT;
typedef enum VkDebugUtilsMessageSeverityFlagBitsEXT {
VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT = 0x00000001,
@@ -8657,7 +8528,6 @@ typedef enum VkDebugUtilsMessageSeverityFlagBitsEXT {
VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT = 0x00001000,
VK_DEBUG_UTILS_MESSAGE_SEVERITY_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
} VkDebugUtilsMessageSeverityFlagBitsEXT;
-typedef VkFlags VkDebugUtilsMessageSeverityFlagsEXT;
typedef enum VkDebugUtilsMessageTypeFlagBitsEXT {
VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT = 0x00000001,
@@ -8666,30 +8536,22 @@ typedef enum VkDebugUtilsMessageTypeFlagBitsEXT {
VK_DEBUG_UTILS_MESSAGE_TYPE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
} VkDebugUtilsMessageTypeFlagBitsEXT;
typedef VkFlags VkDebugUtilsMessageTypeFlagsEXT;
-typedef struct VkDebugUtilsObjectNameInfoEXT {
+typedef VkFlags VkDebugUtilsMessageSeverityFlagsEXT;
+typedef VkFlags VkDebugUtilsMessengerCreateFlagsEXT;
+typedef struct VkDebugUtilsLabelEXT {
VkStructureType sType;
const void* pNext;
- VkObjectType objectType;
- uint64_t objectHandle;
- const char* pObjectName;
-} VkDebugUtilsObjectNameInfoEXT;
+ const char* pLabelName;
+ float color[4];
+} VkDebugUtilsLabelEXT;
-typedef struct VkDebugUtilsObjectTagInfoEXT {
+typedef struct VkDebugUtilsObjectNameInfoEXT {
VkStructureType sType;
const void* pNext;
VkObjectType objectType;
uint64_t objectHandle;
- uint64_t tagName;
- size_t tagSize;
- const void* pTag;
-} VkDebugUtilsObjectTagInfoEXT;
-
-typedef struct VkDebugUtilsLabelEXT {
- VkStructureType sType;
- const void* pNext;
- const char* pLabelName;
- float color[4];
-} VkDebugUtilsLabelEXT;
+ const char* pObjectName;
+} VkDebugUtilsObjectNameInfoEXT;
typedef struct VkDebugUtilsMessengerCallbackDataEXT {
VkStructureType sType;
@@ -8722,6 +8584,16 @@ typedef struct VkDebugUtilsMessengerCreateInfoEXT {
void* pUserData;
} VkDebugUtilsMessengerCreateInfoEXT;
+typedef struct VkDebugUtilsObjectTagInfoEXT {
+ VkStructureType sType;
+ const void* pNext;
+ VkObjectType objectType;
+ uint64_t objectHandle;
+ uint64_t tagName;
+ size_t tagSize;
+ const void* pTag;
+} VkDebugUtilsObjectTagInfoEXT;
+
typedef VkResult (VKAPI_PTR *PFN_vkSetDebugUtilsObjectNameEXT)(VkDevice device, const VkDebugUtilsObjectNameInfoEXT* pNameInfo);
typedef VkResult (VKAPI_PTR *PFN_vkSetDebugUtilsObjectTagEXT)(VkDevice device, const VkDebugUtilsObjectTagInfoEXT* pTagInfo);
typedef void (VKAPI_PTR *PFN_vkQueueBeginDebugUtilsLabelEXT)(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo);
@@ -8932,9 +8804,6 @@ typedef enum VkBlendOverlapEXT {
VK_BLEND_OVERLAP_UNCORRELATED_EXT = 0,
VK_BLEND_OVERLAP_DISJOINT_EXT = 1,
VK_BLEND_OVERLAP_CONJOINT_EXT = 2,
- VK_BLEND_OVERLAP_BEGIN_RANGE_EXT = VK_BLEND_OVERLAP_UNCORRELATED_EXT,
- VK_BLEND_OVERLAP_END_RANGE_EXT = VK_BLEND_OVERLAP_CONJOINT_EXT,
- VK_BLEND_OVERLAP_RANGE_SIZE_EXT = (VK_BLEND_OVERLAP_CONJOINT_EXT - VK_BLEND_OVERLAP_UNCORRELATED_EXT + 1),
VK_BLEND_OVERLAP_MAX_ENUM_EXT = 0x7FFFFFFF
} VkBlendOverlapEXT;
typedef struct VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT {
@@ -8987,9 +8856,6 @@ typedef enum VkCoverageModulationModeNV {
VK_COVERAGE_MODULATION_MODE_RGB_NV = 1,
VK_COVERAGE_MODULATION_MODE_ALPHA_NV = 2,
VK_COVERAGE_MODULATION_MODE_RGBA_NV = 3,
- VK_COVERAGE_MODULATION_MODE_BEGIN_RANGE_NV = VK_COVERAGE_MODULATION_MODE_NONE_NV,
- VK_COVERAGE_MODULATION_MODE_END_RANGE_NV = VK_COVERAGE_MODULATION_MODE_RGBA_NV,
- VK_COVERAGE_MODULATION_MODE_RANGE_SIZE_NV = (VK_COVERAGE_MODULATION_MODE_RGBA_NV - VK_COVERAGE_MODULATION_MODE_NONE_NV + 1),
VK_COVERAGE_MODULATION_MODE_MAX_ENUM_NV = 0x7FFFFFFF
} VkCoverageModulationModeNV;
typedef VkFlags VkPipelineCoverageModulationStateCreateFlagsNV;
@@ -9096,9 +8962,6 @@ VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkValidationCacheEXT)
typedef enum VkValidationCacheHeaderVersionEXT {
VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT = 1,
- VK_VALIDATION_CACHE_HEADER_VERSION_BEGIN_RANGE_EXT = VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT,
- VK_VALIDATION_CACHE_HEADER_VERSION_END_RANGE_EXT = VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT,
- VK_VALIDATION_CACHE_HEADER_VERSION_RANGE_SIZE_EXT = (VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT - VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT + 1),
VK_VALIDATION_CACHE_HEADER_VERSION_MAX_ENUM_EXT = 0x7FFFFFFF
} VkValidationCacheHeaderVersionEXT;
typedef VkFlags VkValidationCacheCreateFlagsEXT;
@@ -9188,9 +9051,6 @@ typedef enum VkShadingRatePaletteEntryNV {
VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X2_PIXELS_NV = 9,
VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X4_PIXELS_NV = 10,
VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV = 11,
- VK_SHADING_RATE_PALETTE_ENTRY_BEGIN_RANGE_NV = VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV,
- VK_SHADING_RATE_PALETTE_ENTRY_END_RANGE_NV = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV,
- VK_SHADING_RATE_PALETTE_ENTRY_RANGE_SIZE_NV = (VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV - VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV + 1),
VK_SHADING_RATE_PALETTE_ENTRY_MAX_ENUM_NV = 0x7FFFFFFF
} VkShadingRatePaletteEntryNV;
@@ -9199,9 +9059,6 @@ typedef enum VkCoarseSampleOrderTypeNV {
VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV = 1,
VK_COARSE_SAMPLE_ORDER_TYPE_PIXEL_MAJOR_NV = 2,
VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV = 3,
- VK_COARSE_SAMPLE_ORDER_TYPE_BEGIN_RANGE_NV = VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV,
- VK_COARSE_SAMPLE_ORDER_TYPE_END_RANGE_NV = VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV,
- VK_COARSE_SAMPLE_ORDER_TYPE_RANGE_SIZE_NV = (VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV - VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV + 1),
VK_COARSE_SAMPLE_ORDER_TYPE_MAX_ENUM_NV = 0x7FFFFFFF
} VkCoarseSampleOrderTypeNV;
typedef struct VkShadingRatePaletteNV {
@@ -9278,91 +9135,127 @@ VKAPI_ATTR void VKAPI_CALL vkCmdSetCoarseSampleOrderNV(
#define VK_NV_ray_tracing 1
-VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkAccelerationStructureNV)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkAccelerationStructureKHR)
+typedef VkAccelerationStructureKHR VkAccelerationStructureNV;
+
#define VK_NV_RAY_TRACING_SPEC_VERSION 3
#define VK_NV_RAY_TRACING_EXTENSION_NAME "VK_NV_ray_tracing"
-#define VK_SHADER_UNUSED_NV (~0U)
-
-typedef enum VkAccelerationStructureTypeNV {
- VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV = 0,
- VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV = 1,
- VK_ACCELERATION_STRUCTURE_TYPE_BEGIN_RANGE_NV = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV,
- VK_ACCELERATION_STRUCTURE_TYPE_END_RANGE_NV = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV,
- VK_ACCELERATION_STRUCTURE_TYPE_RANGE_SIZE_NV = (VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV - VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV + 1),
- VK_ACCELERATION_STRUCTURE_TYPE_MAX_ENUM_NV = 0x7FFFFFFF
-} VkAccelerationStructureTypeNV;
-
-typedef enum VkRayTracingShaderGroupTypeNV {
- VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV = 0,
- VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV = 1,
- VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV = 2,
- VK_RAY_TRACING_SHADER_GROUP_TYPE_BEGIN_RANGE_NV = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV,
- VK_RAY_TRACING_SHADER_GROUP_TYPE_END_RANGE_NV = VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV,
- VK_RAY_TRACING_SHADER_GROUP_TYPE_RANGE_SIZE_NV = (VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV - VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV + 1),
- VK_RAY_TRACING_SHADER_GROUP_TYPE_MAX_ENUM_NV = 0x7FFFFFFF
-} VkRayTracingShaderGroupTypeNV;
-
-typedef enum VkGeometryTypeNV {
- VK_GEOMETRY_TYPE_TRIANGLES_NV = 0,
- VK_GEOMETRY_TYPE_AABBS_NV = 1,
- VK_GEOMETRY_TYPE_BEGIN_RANGE_NV = VK_GEOMETRY_TYPE_TRIANGLES_NV,
- VK_GEOMETRY_TYPE_END_RANGE_NV = VK_GEOMETRY_TYPE_AABBS_NV,
- VK_GEOMETRY_TYPE_RANGE_SIZE_NV = (VK_GEOMETRY_TYPE_AABBS_NV - VK_GEOMETRY_TYPE_TRIANGLES_NV + 1),
- VK_GEOMETRY_TYPE_MAX_ENUM_NV = 0x7FFFFFFF
-} VkGeometryTypeNV;
-
-typedef enum VkCopyAccelerationStructureModeNV {
- VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV = 0,
- VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV = 1,
- VK_COPY_ACCELERATION_STRUCTURE_MODE_BEGIN_RANGE_NV = VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV,
- VK_COPY_ACCELERATION_STRUCTURE_MODE_END_RANGE_NV = VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV,
- VK_COPY_ACCELERATION_STRUCTURE_MODE_RANGE_SIZE_NV = (VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV - VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV + 1),
- VK_COPY_ACCELERATION_STRUCTURE_MODE_MAX_ENUM_NV = 0x7FFFFFFF
-} VkCopyAccelerationStructureModeNV;
-
-typedef enum VkAccelerationStructureMemoryRequirementsTypeNV {
- VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV = 0,
- VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV = 1,
- VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV = 2,
- VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BEGIN_RANGE_NV = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV,
- VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_END_RANGE_NV = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV,
- VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_RANGE_SIZE_NV = (VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV - VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV + 1),
- VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_MAX_ENUM_NV = 0x7FFFFFFF
-} VkAccelerationStructureMemoryRequirementsTypeNV;
-
-typedef enum VkGeometryFlagBitsNV {
- VK_GEOMETRY_OPAQUE_BIT_NV = 0x00000001,
- VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_NV = 0x00000002,
- VK_GEOMETRY_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF
-} VkGeometryFlagBitsNV;
-typedef VkFlags VkGeometryFlagsNV;
-
-typedef enum VkGeometryInstanceFlagBitsNV {
- VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV = 0x00000001,
- VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_NV = 0x00000002,
- VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_NV = 0x00000004,
- VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_NV = 0x00000008,
- VK_GEOMETRY_INSTANCE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF
-} VkGeometryInstanceFlagBitsNV;
-typedef VkFlags VkGeometryInstanceFlagsNV;
-
-typedef enum VkBuildAccelerationStructureFlagBitsNV {
- VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV = 0x00000001,
- VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV = 0x00000002,
- VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV = 0x00000004,
- VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_NV = 0x00000008,
- VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_NV = 0x00000010,
- VK_BUILD_ACCELERATION_STRUCTURE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF
-} VkBuildAccelerationStructureFlagBitsNV;
-typedef VkFlags VkBuildAccelerationStructureFlagsNV;
+#define VK_SHADER_UNUSED_KHR (~0U)
+#define VK_SHADER_UNUSED_NV VK_SHADER_UNUSED_KHR
+
+typedef enum VkRayTracingShaderGroupTypeKHR {
+ VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR = 0,
+ VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR = 1,
+ VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR = 2,
+ VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR,
+ VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR,
+ VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV = VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR,
+ VK_RAY_TRACING_SHADER_GROUP_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkRayTracingShaderGroupTypeKHR;
+typedef VkRayTracingShaderGroupTypeKHR VkRayTracingShaderGroupTypeNV;
+
+
+typedef enum VkGeometryTypeKHR {
+ VK_GEOMETRY_TYPE_TRIANGLES_KHR = 0,
+ VK_GEOMETRY_TYPE_AABBS_KHR = 1,
+ VK_GEOMETRY_TYPE_INSTANCES_KHR = 1000150000,
+ VK_GEOMETRY_TYPE_TRIANGLES_NV = VK_GEOMETRY_TYPE_TRIANGLES_KHR,
+ VK_GEOMETRY_TYPE_AABBS_NV = VK_GEOMETRY_TYPE_AABBS_KHR,
+ VK_GEOMETRY_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkGeometryTypeKHR;
+typedef VkGeometryTypeKHR VkGeometryTypeNV;
+
+
+typedef enum VkAccelerationStructureTypeKHR {
+ VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR = 0,
+ VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR = 1,
+ VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR,
+ VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR,
+ VK_ACCELERATION_STRUCTURE_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkAccelerationStructureTypeKHR;
+typedef VkAccelerationStructureTypeKHR VkAccelerationStructureTypeNV;
+
+
+typedef enum VkCopyAccelerationStructureModeKHR {
+ VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR = 0,
+ VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR = 1,
+ VK_COPY_ACCELERATION_STRUCTURE_MODE_SERIALIZE_KHR = 2,
+ VK_COPY_ACCELERATION_STRUCTURE_MODE_DESERIALIZE_KHR = 3,
+ VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV = VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR,
+ VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV = VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR,
+ VK_COPY_ACCELERATION_STRUCTURE_MODE_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkCopyAccelerationStructureModeKHR;
+typedef VkCopyAccelerationStructureModeKHR VkCopyAccelerationStructureModeNV;
+
+
+typedef enum VkAccelerationStructureMemoryRequirementsTypeKHR {
+ VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_KHR = 0,
+ VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_KHR = 1,
+ VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_KHR = 2,
+ VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_KHR,
+ VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_KHR,
+ VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_KHR,
+ VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkAccelerationStructureMemoryRequirementsTypeKHR;
+typedef VkAccelerationStructureMemoryRequirementsTypeKHR VkAccelerationStructureMemoryRequirementsTypeNV;
+
+
+typedef enum VkGeometryFlagBitsKHR {
+ VK_GEOMETRY_OPAQUE_BIT_KHR = 0x00000001,
+ VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR = 0x00000002,
+ VK_GEOMETRY_OPAQUE_BIT_NV = VK_GEOMETRY_OPAQUE_BIT_KHR,
+ VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_NV = VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR,
+ VK_GEOMETRY_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkGeometryFlagBitsKHR;
+typedef VkFlags VkGeometryFlagsKHR;
+typedef VkGeometryFlagsKHR VkGeometryFlagsNV;
+
+typedef VkGeometryFlagBitsKHR VkGeometryFlagBitsNV;
+
+
+typedef enum VkGeometryInstanceFlagBitsKHR {
+ VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR = 0x00000001,
+ VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR = 0x00000002,
+ VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR = 0x00000004,
+ VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR = 0x00000008,
+ VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV = VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR,
+ VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_NV = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR,
+ VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_NV = VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR,
+ VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_NV = VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR,
+ VK_GEOMETRY_INSTANCE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkGeometryInstanceFlagBitsKHR;
+typedef VkFlags VkGeometryInstanceFlagsKHR;
+typedef VkGeometryInstanceFlagsKHR VkGeometryInstanceFlagsNV;
+
+typedef VkGeometryInstanceFlagBitsKHR VkGeometryInstanceFlagBitsNV;
+
+
+typedef enum VkBuildAccelerationStructureFlagBitsKHR {
+ VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR = 0x00000001,
+ VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR = 0x00000002,
+ VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR = 0x00000004,
+ VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR = 0x00000008,
+ VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR = 0x00000010,
+ VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR,
+ VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR,
+ VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR,
+ VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR,
+ VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR,
+ VK_BUILD_ACCELERATION_STRUCTURE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkBuildAccelerationStructureFlagBitsKHR;
+typedef VkFlags VkBuildAccelerationStructureFlagsKHR;
+typedef VkBuildAccelerationStructureFlagsKHR VkBuildAccelerationStructureFlagsNV;
+
+typedef VkBuildAccelerationStructureFlagBitsKHR VkBuildAccelerationStructureFlagBitsNV;
+
typedef struct VkRayTracingShaderGroupCreateInfoNV {
- VkStructureType sType;
- const void* pNext;
- VkRayTracingShaderGroupTypeNV type;
- uint32_t generalShader;
- uint32_t closestHitShader;
- uint32_t anyHitShader;
- uint32_t intersectionShader;
+ VkStructureType sType;
+ const void* pNext;
+ VkRayTracingShaderGroupTypeKHR type;
+ uint32_t generalShader;
+ uint32_t closestHitShader;
+ uint32_t anyHitShader;
+ uint32_t intersectionShader;
} VkRayTracingShaderGroupCreateInfoNV;
typedef struct VkRayTracingPipelineCreateInfoNV {
@@ -9410,11 +9303,11 @@ typedef struct VkGeometryDataNV {
} VkGeometryDataNV;
typedef struct VkGeometryNV {
- VkStructureType sType;
- const void* pNext;
- VkGeometryTypeNV geometryType;
- VkGeometryDataNV geometry;
- VkGeometryFlagsNV flags;
+ VkStructureType sType;
+ const void* pNext;
+ VkGeometryTypeKHR geometryType;
+ VkGeometryDataNV geometry;
+ VkGeometryFlagsKHR flags;
} VkGeometryNV;
typedef struct VkAccelerationStructureInfoNV {
@@ -9434,22 +9327,26 @@ typedef struct VkAccelerationStructureCreateInfoNV {
VkAccelerationStructureInfoNV info;
} VkAccelerationStructureCreateInfoNV;
-typedef struct VkBindAccelerationStructureMemoryInfoNV {
- VkStructureType sType;
- const void* pNext;
- VkAccelerationStructureNV accelerationStructure;
- VkDeviceMemory memory;
- VkDeviceSize memoryOffset;
- uint32_t deviceIndexCount;
- const uint32_t* pDeviceIndices;
-} VkBindAccelerationStructureMemoryInfoNV;
-
-typedef struct VkWriteDescriptorSetAccelerationStructureNV {
- VkStructureType sType;
- const void* pNext;
- uint32_t accelerationStructureCount;
- const VkAccelerationStructureNV* pAccelerationStructures;
-} VkWriteDescriptorSetAccelerationStructureNV;
+typedef struct VkBindAccelerationStructureMemoryInfoKHR {
+ VkStructureType sType;
+ const void* pNext;
+ VkAccelerationStructureKHR accelerationStructure;
+ VkDeviceMemory memory;
+ VkDeviceSize memoryOffset;
+ uint32_t deviceIndexCount;
+ const uint32_t* pDeviceIndices;
+} VkBindAccelerationStructureMemoryInfoKHR;
+
+typedef VkBindAccelerationStructureMemoryInfoKHR VkBindAccelerationStructureMemoryInfoNV;
+
+typedef struct VkWriteDescriptorSetAccelerationStructureKHR {
+ VkStructureType sType;
+ const void* pNext;
+ uint32_t accelerationStructureCount;
+ const VkAccelerationStructureKHR* pAccelerationStructures;
+} VkWriteDescriptorSetAccelerationStructureKHR;
+
+typedef VkWriteDescriptorSetAccelerationStructureKHR VkWriteDescriptorSetAccelerationStructureNV;
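/* Illustrative sketch, not part of the diffed header: binding an acceleration
 * structure into a descriptor set through the KHR-aliased struct declared just
 * above. The `descriptorSet`, `accelStructure` and `device` handles are
 * assumptions for the example; the NV-suffixed enums are used because this
 * header only aliases the NV extension onto the KHR names. */
VkAccelerationStructureKHR accelStructure = /* previously created */ VK_NULL_HANDLE;
VkWriteDescriptorSetAccelerationStructureKHR asWrite = {
    .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV,
    .pNext = NULL,
    .accelerationStructureCount = 1,
    .pAccelerationStructures = &accelStructure
};
VkWriteDescriptorSet write = {
    .sType           = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
    .pNext           = &asWrite,            /* chained acceleration structure write */
    .dstSet          = descriptorSet,       /* assumed descriptor set handle */
    .dstBinding      = 0,
    .descriptorCount = 1,
    .descriptorType  = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV
};
vkUpdateDescriptorSets(device, 1, &write, 0, NULL);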
typedef struct VkAccelerationStructureMemoryRequirementsInfoNV {
VkStructureType sType;
@@ -9471,17 +9368,49 @@ typedef struct VkPhysicalDeviceRayTracingPropertiesNV {
uint32_t maxDescriptorSetAccelerationStructures;
} VkPhysicalDeviceRayTracingPropertiesNV;
+typedef struct VkTransformMatrixKHR {
+ float matrix[3][4];
+} VkTransformMatrixKHR;
+
+typedef VkTransformMatrixKHR VkTransformMatrixNV;
+
+typedef struct VkAabbPositionsKHR {
+ float minX;
+ float minY;
+ float minZ;
+ float maxX;
+ float maxY;
+ float maxZ;
+} VkAabbPositionsKHR;
+
+typedef VkAabbPositionsKHR VkAabbPositionsNV;
+
+typedef struct VkAccelerationStructureInstanceKHR {
+ VkTransformMatrixKHR transform;
+ uint32_t instanceCustomIndex:24;
+ uint32_t mask:8;
+ uint32_t instanceShaderBindingTableRecordOffset:24;
+ VkGeometryInstanceFlagsKHR flags:8;
+ uint64_t accelerationStructureReference;
+} VkAccelerationStructureInstanceKHR;
+
+typedef VkAccelerationStructureInstanceKHR VkAccelerationStructureInstanceNV;
+
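/* Illustrative sketch, not part of the diffed header: filling one
 * VkAccelerationStructureInstanceKHR as declared above. The identity transform
 * and the `blasHandle` value are assumptions; with the NV extension the 64-bit
 * reference is typically obtained from vkGetAccelerationStructureHandleNV. */
VkTransformMatrixKHR identity = {
    .matrix = {
        { 1.0f, 0.0f, 0.0f, 0.0f },
        { 0.0f, 1.0f, 0.0f, 0.0f },
        { 0.0f, 0.0f, 1.0f, 0.0f },
    }
};
VkAccelerationStructureInstanceKHR instance = {
    .transform                              = identity,
    .instanceCustomIndex                    = 0,
    .mask                                   = 0xFF,     /* visible to all ray masks */
    .instanceShaderBindingTableRecordOffset = 0,
    .flags                                  = VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR,
    .accelerationStructureReference         = blasHandle /* assumed 64-bit BLAS handle */
};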
typedef VkResult (VKAPI_PTR *PFN_vkCreateAccelerationStructureNV)(VkDevice device, const VkAccelerationStructureCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureNV* pAccelerationStructure);
-typedef void (VKAPI_PTR *PFN_vkDestroyAccelerationStructureNV)(VkDevice device, VkAccelerationStructureNV accelerationStructure, const VkAllocationCallbacks* pAllocator);
+typedef void (VKAPI_PTR *PFN_vkDestroyAccelerationStructureKHR)(VkDevice device, VkAccelerationStructureKHR accelerationStructure, const VkAllocationCallbacks* pAllocator);
+typedef void (VKAPI_PTR *PFN_vkDestroyAccelerationStructureNV)(VkDevice device, VkAccelerationStructureKHR accelerationStructure, const VkAllocationCallbacks* pAllocator);
typedef void (VKAPI_PTR *PFN_vkGetAccelerationStructureMemoryRequirementsNV)(VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements);
-typedef VkResult (VKAPI_PTR *PFN_vkBindAccelerationStructureMemoryNV)(VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV* pBindInfos);
-typedef void (VKAPI_PTR *PFN_vkCmdBuildAccelerationStructureNV)(VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset);
-typedef void (VKAPI_PTR *PFN_vkCmdCopyAccelerationStructureNV)(VkCommandBuffer commandBuffer, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkCopyAccelerationStructureModeNV mode);
+typedef VkResult (VKAPI_PTR *PFN_vkBindAccelerationStructureMemoryKHR)(VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoKHR* pBindInfos);
+typedef VkResult (VKAPI_PTR *PFN_vkBindAccelerationStructureMemoryNV)(VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoKHR* pBindInfos);
+typedef void (VKAPI_PTR *PFN_vkCmdBuildAccelerationStructureNV)(VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, VkAccelerationStructureKHR dst, VkAccelerationStructureKHR src, VkBuffer scratch, VkDeviceSize scratchOffset);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyAccelerationStructureNV)(VkCommandBuffer commandBuffer, VkAccelerationStructureKHR dst, VkAccelerationStructureKHR src, VkCopyAccelerationStructureModeKHR mode);
typedef void (VKAPI_PTR *PFN_vkCmdTraceRaysNV)(VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer, VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer, VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride, VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset, VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer, VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth);
typedef VkResult (VKAPI_PTR *PFN_vkCreateRayTracingPipelinesNV)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines);
+typedef VkResult (VKAPI_PTR *PFN_vkGetRayTracingShaderGroupHandlesKHR)(VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData);
typedef VkResult (VKAPI_PTR *PFN_vkGetRayTracingShaderGroupHandlesNV)(VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData);
-typedef VkResult (VKAPI_PTR *PFN_vkGetAccelerationStructureHandleNV)(VkDevice device, VkAccelerationStructureNV accelerationStructure, size_t dataSize, void* pData);
-typedef void (VKAPI_PTR *PFN_vkCmdWriteAccelerationStructuresPropertiesNV)(VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureNV* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery);
+typedef VkResult (VKAPI_PTR *PFN_vkGetAccelerationStructureHandleNV)(VkDevice device, VkAccelerationStructureKHR accelerationStructure, size_t dataSize, void* pData);
+typedef void (VKAPI_PTR *PFN_vkCmdWriteAccelerationStructuresPropertiesKHR)(VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery);
+typedef void (VKAPI_PTR *PFN_vkCmdWriteAccelerationStructuresPropertiesNV)(VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery);
typedef VkResult (VKAPI_PTR *PFN_vkCompileDeferredNV)(VkDevice device, VkPipeline pipeline, uint32_t shader);
#ifndef VK_NO_PROTOTYPES
@@ -9491,9 +9420,14 @@ VKAPI_ATTR VkResult VKAPI_CALL vkCreateAccelerationStructureNV(
const VkAllocationCallbacks* pAllocator,
VkAccelerationStructureNV* pAccelerationStructure);
+VKAPI_ATTR void VKAPI_CALL vkDestroyAccelerationStructureKHR(
+ VkDevice device,
+ VkAccelerationStructureKHR accelerationStructure,
+ const VkAllocationCallbacks* pAllocator);
+
VKAPI_ATTR void VKAPI_CALL vkDestroyAccelerationStructureNV(
VkDevice device,
- VkAccelerationStructureNV accelerationStructure,
+ VkAccelerationStructureKHR accelerationStructure,
const VkAllocationCallbacks* pAllocator);
VKAPI_ATTR void VKAPI_CALL vkGetAccelerationStructureMemoryRequirementsNV(
@@ -9501,10 +9435,15 @@ VKAPI_ATTR void VKAPI_CALL vkGetAccelerationStructureMemoryRequirementsNV(
const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo,
VkMemoryRequirements2KHR* pMemoryRequirements);
+VKAPI_ATTR VkResult VKAPI_CALL vkBindAccelerationStructureMemoryKHR(
+ VkDevice device,
+ uint32_t bindInfoCount,
+ const VkBindAccelerationStructureMemoryInfoKHR* pBindInfos);
+
VKAPI_ATTR VkResult VKAPI_CALL vkBindAccelerationStructureMemoryNV(
VkDevice device,
uint32_t bindInfoCount,
- const VkBindAccelerationStructureMemoryInfoNV* pBindInfos);
+ const VkBindAccelerationStructureMemoryInfoKHR* pBindInfos);
VKAPI_ATTR void VKAPI_CALL vkCmdBuildAccelerationStructureNV(
VkCommandBuffer commandBuffer,
@@ -9512,16 +9451,16 @@ VKAPI_ATTR void VKAPI_CALL vkCmdBuildAccelerationStructureNV(
VkBuffer instanceData,
VkDeviceSize instanceOffset,
VkBool32 update,
- VkAccelerationStructureNV dst,
- VkAccelerationStructureNV src,
+ VkAccelerationStructureKHR dst,
+ VkAccelerationStructureKHR src,
VkBuffer scratch,
VkDeviceSize scratchOffset);
VKAPI_ATTR void VKAPI_CALL vkCmdCopyAccelerationStructureNV(
VkCommandBuffer commandBuffer,
- VkAccelerationStructureNV dst,
- VkAccelerationStructureNV src,
- VkCopyAccelerationStructureModeNV mode);
+ VkAccelerationStructureKHR dst,
+ VkAccelerationStructureKHR src,
+ VkCopyAccelerationStructureModeKHR mode);
VKAPI_ATTR void VKAPI_CALL vkCmdTraceRaysNV(
VkCommandBuffer commandBuffer,
@@ -9548,6 +9487,14 @@ VKAPI_ATTR VkResult VKAPI_CALL vkCreateRayTracingPipelinesNV(
const VkAllocationCallbacks* pAllocator,
VkPipeline* pPipelines);
+VKAPI_ATTR VkResult VKAPI_CALL vkGetRayTracingShaderGroupHandlesKHR(
+ VkDevice device,
+ VkPipeline pipeline,
+ uint32_t firstGroup,
+ uint32_t groupCount,
+ size_t dataSize,
+ void* pData);
+
VKAPI_ATTR VkResult VKAPI_CALL vkGetRayTracingShaderGroupHandlesNV(
VkDevice device,
VkPipeline pipeline,
@@ -9558,14 +9505,22 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetRayTracingShaderGroupHandlesNV(
VKAPI_ATTR VkResult VKAPI_CALL vkGetAccelerationStructureHandleNV(
VkDevice device,
- VkAccelerationStructureNV accelerationStructure,
+ VkAccelerationStructureKHR accelerationStructure,
size_t dataSize,
void* pData);
+VKAPI_ATTR void VKAPI_CALL vkCmdWriteAccelerationStructuresPropertiesKHR(
+ VkCommandBuffer commandBuffer,
+ uint32_t accelerationStructureCount,
+ const VkAccelerationStructureKHR* pAccelerationStructures,
+ VkQueryType queryType,
+ VkQueryPool queryPool,
+ uint32_t firstQuery);
+
VKAPI_ATTR void VKAPI_CALL vkCmdWriteAccelerationStructuresPropertiesNV(
VkCommandBuffer commandBuffer,
uint32_t accelerationStructureCount,
- const VkAccelerationStructureNV* pAccelerationStructures,
+ const VkAccelerationStructureKHR* pAccelerationStructures,
VkQueryType queryType,
VkQueryPool queryPool,
uint32_t firstQuery);
@@ -9612,6 +9567,11 @@ typedef struct VkFilterCubicImageViewImageFormatPropertiesEXT {
+#define VK_QCOM_render_pass_shader_resolve 1
+#define VK_QCOM_RENDER_PASS_SHADER_RESOLVE_SPEC_VERSION 4
+#define VK_QCOM_RENDER_PASS_SHADER_RESOLVE_EXTENSION_NAME "VK_QCOM_render_pass_shader_resolve"
+
+
#define VK_EXT_global_priority 1
#define VK_EXT_GLOBAL_PRIORITY_SPEC_VERSION 2
#define VK_EXT_GLOBAL_PRIORITY_EXTENSION_NAME "VK_EXT_global_priority"
@@ -9621,9 +9581,6 @@ typedef enum VkQueueGlobalPriorityEXT {
VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_EXT = 256,
VK_QUEUE_GLOBAL_PRIORITY_HIGH_EXT = 512,
VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT = 1024,
- VK_QUEUE_GLOBAL_PRIORITY_BEGIN_RANGE_EXT = VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT,
- VK_QUEUE_GLOBAL_PRIORITY_END_RANGE_EXT = VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT,
- VK_QUEUE_GLOBAL_PRIORITY_RANGE_SIZE_EXT = (VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT - VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT + 1),
VK_QUEUE_GLOBAL_PRIORITY_MAX_ENUM_EXT = 0x7FFFFFFF
} VkQueueGlobalPriorityEXT;
typedef struct VkDeviceQueueGlobalPriorityCreateInfoEXT {
@@ -9707,9 +9664,6 @@ typedef enum VkTimeDomainEXT {
VK_TIME_DOMAIN_CLOCK_MONOTONIC_EXT = 1,
VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_EXT = 2,
VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT = 3,
- VK_TIME_DOMAIN_BEGIN_RANGE_EXT = VK_TIME_DOMAIN_DEVICE_EXT,
- VK_TIME_DOMAIN_END_RANGE_EXT = VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT,
- VK_TIME_DOMAIN_RANGE_SIZE_EXT = (VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT - VK_TIME_DOMAIN_DEVICE_EXT + 1),
VK_TIME_DOMAIN_MAX_ENUM_EXT = 0x7FFFFFFF
} VkTimeDomainEXT;
typedef struct VkCalibratedTimestampInfoEXT {
@@ -9768,9 +9722,6 @@ typedef enum VkMemoryOverallocationBehaviorAMD {
VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD = 0,
VK_MEMORY_OVERALLOCATION_BEHAVIOR_ALLOWED_AMD = 1,
VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD = 2,
- VK_MEMORY_OVERALLOCATION_BEHAVIOR_BEGIN_RANGE_AMD = VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD,
- VK_MEMORY_OVERALLOCATION_BEHAVIOR_END_RANGE_AMD = VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD,
- VK_MEMORY_OVERALLOCATION_BEHAVIOR_RANGE_SIZE_AMD = (VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD - VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD + 1),
VK_MEMORY_OVERALLOCATION_BEHAVIOR_MAX_ENUM_AMD = 0x7FFFFFFF
} VkMemoryOverallocationBehaviorAMD;
typedef struct VkDeviceMemoryOverallocationCreateInfoAMD {
@@ -10008,40 +9959,28 @@ typedef struct VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL {
#define VK_INTEL_performance_query 1
VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPerformanceConfigurationINTEL)
-#define VK_INTEL_PERFORMANCE_QUERY_SPEC_VERSION 1
+#define VK_INTEL_PERFORMANCE_QUERY_SPEC_VERSION 2
#define VK_INTEL_PERFORMANCE_QUERY_EXTENSION_NAME "VK_INTEL_performance_query"
typedef enum VkPerformanceConfigurationTypeINTEL {
VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL = 0,
- VK_PERFORMANCE_CONFIGURATION_TYPE_BEGIN_RANGE_INTEL = VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL,
- VK_PERFORMANCE_CONFIGURATION_TYPE_END_RANGE_INTEL = VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL,
- VK_PERFORMANCE_CONFIGURATION_TYPE_RANGE_SIZE_INTEL = (VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL - VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL + 1),
VK_PERFORMANCE_CONFIGURATION_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF
} VkPerformanceConfigurationTypeINTEL;
typedef enum VkQueryPoolSamplingModeINTEL {
VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL = 0,
- VK_QUERY_POOL_SAMPLING_MODE_BEGIN_RANGE_INTEL = VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL,
- VK_QUERY_POOL_SAMPLING_MODE_END_RANGE_INTEL = VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL,
- VK_QUERY_POOL_SAMPLING_MODE_RANGE_SIZE_INTEL = (VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL - VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL + 1),
VK_QUERY_POOL_SAMPLING_MODE_MAX_ENUM_INTEL = 0x7FFFFFFF
} VkQueryPoolSamplingModeINTEL;
typedef enum VkPerformanceOverrideTypeINTEL {
VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL = 0,
VK_PERFORMANCE_OVERRIDE_TYPE_FLUSH_GPU_CACHES_INTEL = 1,
- VK_PERFORMANCE_OVERRIDE_TYPE_BEGIN_RANGE_INTEL = VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL,
- VK_PERFORMANCE_OVERRIDE_TYPE_END_RANGE_INTEL = VK_PERFORMANCE_OVERRIDE_TYPE_FLUSH_GPU_CACHES_INTEL,
- VK_PERFORMANCE_OVERRIDE_TYPE_RANGE_SIZE_INTEL = (VK_PERFORMANCE_OVERRIDE_TYPE_FLUSH_GPU_CACHES_INTEL - VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL + 1),
VK_PERFORMANCE_OVERRIDE_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF
} VkPerformanceOverrideTypeINTEL;
typedef enum VkPerformanceParameterTypeINTEL {
VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL = 0,
VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL = 1,
- VK_PERFORMANCE_PARAMETER_TYPE_BEGIN_RANGE_INTEL = VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL,
- VK_PERFORMANCE_PARAMETER_TYPE_END_RANGE_INTEL = VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL,
- VK_PERFORMANCE_PARAMETER_TYPE_RANGE_SIZE_INTEL = (VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL - VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL + 1),
VK_PERFORMANCE_PARAMETER_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF
} VkPerformanceParameterTypeINTEL;
@@ -10051,9 +9990,6 @@ typedef enum VkPerformanceValueTypeINTEL {
VK_PERFORMANCE_VALUE_TYPE_FLOAT_INTEL = 2,
VK_PERFORMANCE_VALUE_TYPE_BOOL_INTEL = 3,
VK_PERFORMANCE_VALUE_TYPE_STRING_INTEL = 4,
- VK_PERFORMANCE_VALUE_TYPE_BEGIN_RANGE_INTEL = VK_PERFORMANCE_VALUE_TYPE_UINT32_INTEL,
- VK_PERFORMANCE_VALUE_TYPE_END_RANGE_INTEL = VK_PERFORMANCE_VALUE_TYPE_STRING_INTEL,
- VK_PERFORMANCE_VALUE_TYPE_RANGE_SIZE_INTEL = (VK_PERFORMANCE_VALUE_TYPE_STRING_INTEL - VK_PERFORMANCE_VALUE_TYPE_UINT32_INTEL + 1),
VK_PERFORMANCE_VALUE_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF
} VkPerformanceValueTypeINTEL;
typedef union VkPerformanceValueDataINTEL {
@@ -10075,11 +10011,13 @@ typedef struct VkInitializePerformanceApiInfoINTEL {
void* pUserData;
} VkInitializePerformanceApiInfoINTEL;
-typedef struct VkQueryPoolCreateInfoINTEL {
+typedef struct VkQueryPoolPerformanceQueryCreateInfoINTEL {
VkStructureType sType;
const void* pNext;
VkQueryPoolSamplingModeINTEL performanceCountersSampling;
-} VkQueryPoolCreateInfoINTEL;
+} VkQueryPoolPerformanceQueryCreateInfoINTEL;
+
+typedef VkQueryPoolPerformanceQueryCreateInfoINTEL VkQueryPoolCreateInfoINTEL;
typedef struct VkPerformanceMarkerInfoINTEL {
VkStructureType sType;
@@ -10408,16 +10346,15 @@ typedef VkImageStencilUsageCreateInfo VkImageStencilUsageCreateInfoEXT;
#define VK_EXT_validation_features 1
-#define VK_EXT_VALIDATION_FEATURES_SPEC_VERSION 2
+#define VK_EXT_VALIDATION_FEATURES_SPEC_VERSION 4
#define VK_EXT_VALIDATION_FEATURES_EXTENSION_NAME "VK_EXT_validation_features"
typedef enum VkValidationFeatureEnableEXT {
VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT = 0,
VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT = 1,
VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT = 2,
- VK_VALIDATION_FEATURE_ENABLE_BEGIN_RANGE_EXT = VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT,
- VK_VALIDATION_FEATURE_ENABLE_END_RANGE_EXT = VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT,
- VK_VALIDATION_FEATURE_ENABLE_RANGE_SIZE_EXT = (VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT - VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT + 1),
+ VK_VALIDATION_FEATURE_ENABLE_DEBUG_PRINTF_EXT = 3,
+ VK_VALIDATION_FEATURE_ENABLE_SYNCHRONIZATION_VALIDATION_EXT = 4,
VK_VALIDATION_FEATURE_ENABLE_MAX_ENUM_EXT = 0x7FFFFFFF
} VkValidationFeatureEnableEXT;
@@ -10429,9 +10366,6 @@ typedef enum VkValidationFeatureDisableEXT {
VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT = 4,
VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT = 5,
VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT = 6,
- VK_VALIDATION_FEATURE_DISABLE_BEGIN_RANGE_EXT = VK_VALIDATION_FEATURE_DISABLE_ALL_EXT,
- VK_VALIDATION_FEATURE_DISABLE_END_RANGE_EXT = VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT,
- VK_VALIDATION_FEATURE_DISABLE_RANGE_SIZE_EXT = (VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT - VK_VALIDATION_FEATURE_DISABLE_ALL_EXT + 1),
VK_VALIDATION_FEATURE_DISABLE_MAX_ENUM_EXT = 0x7FFFFFFF
} VkValidationFeatureDisableEXT;
typedef struct VkValidationFeaturesEXT {
@@ -10461,9 +10395,6 @@ typedef enum VkComponentTypeNV {
VK_COMPONENT_TYPE_UINT16_NV = 8,
VK_COMPONENT_TYPE_UINT32_NV = 9,
VK_COMPONENT_TYPE_UINT64_NV = 10,
- VK_COMPONENT_TYPE_BEGIN_RANGE_NV = VK_COMPONENT_TYPE_FLOAT16_NV,
- VK_COMPONENT_TYPE_END_RANGE_NV = VK_COMPONENT_TYPE_UINT64_NV,
- VK_COMPONENT_TYPE_RANGE_SIZE_NV = (VK_COMPONENT_TYPE_UINT64_NV - VK_COMPONENT_TYPE_FLOAT16_NV + 1),
VK_COMPONENT_TYPE_MAX_ENUM_NV = 0x7FFFFFFF
} VkComponentTypeNV;
@@ -10472,9 +10403,6 @@ typedef enum VkScopeNV {
VK_SCOPE_WORKGROUP_NV = 2,
VK_SCOPE_SUBGROUP_NV = 3,
VK_SCOPE_QUEUE_FAMILY_NV = 5,
- VK_SCOPE_BEGIN_RANGE_NV = VK_SCOPE_DEVICE_NV,
- VK_SCOPE_END_RANGE_NV = VK_SCOPE_QUEUE_FAMILY_NV,
- VK_SCOPE_RANGE_SIZE_NV = (VK_SCOPE_QUEUE_FAMILY_NV - VK_SCOPE_DEVICE_NV + 1),
VK_SCOPE_MAX_ENUM_NV = 0x7FFFFFFF
} VkScopeNV;
typedef struct VkCooperativeMatrixPropertiesNV {
@@ -10520,9 +10448,6 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
typedef enum VkCoverageReductionModeNV {
VK_COVERAGE_REDUCTION_MODE_MERGE_NV = 0,
VK_COVERAGE_REDUCTION_MODE_TRUNCATE_NV = 1,
- VK_COVERAGE_REDUCTION_MODE_BEGIN_RANGE_NV = VK_COVERAGE_REDUCTION_MODE_MERGE_NV,
- VK_COVERAGE_REDUCTION_MODE_END_RANGE_NV = VK_COVERAGE_REDUCTION_MODE_TRUNCATE_NV,
- VK_COVERAGE_REDUCTION_MODE_RANGE_SIZE_NV = (VK_COVERAGE_REDUCTION_MODE_TRUNCATE_NV - VK_COVERAGE_REDUCTION_MODE_MERGE_NV + 1),
VK_COVERAGE_REDUCTION_MODE_MAX_ENUM_NV = 0x7FFFFFFF
} VkCoverageReductionModeNV;
typedef VkFlags VkPipelineCoverageReductionStateCreateFlagsNV;
@@ -10612,9 +10537,6 @@ typedef enum VkLineRasterizationModeEXT {
VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT = 1,
VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT = 2,
VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT = 3,
- VK_LINE_RASTERIZATION_MODE_BEGIN_RANGE_EXT = VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT,
- VK_LINE_RASTERIZATION_MODE_END_RANGE_EXT = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT,
- VK_LINE_RASTERIZATION_MODE_RANGE_SIZE_EXT = (VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT - VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT + 1),
VK_LINE_RASTERIZATION_MODE_MAX_ENUM_EXT = 0x7FFFFFFF
} VkLineRasterizationModeEXT;
typedef struct VkPhysicalDeviceLineRasterizationFeaturesEXT {
@@ -10653,6 +10575,28 @@ VKAPI_ATTR void VKAPI_CALL vkCmdSetLineStippleEXT(
#endif
+#define VK_EXT_shader_atomic_float 1
+#define VK_EXT_SHADER_ATOMIC_FLOAT_SPEC_VERSION 1
+#define VK_EXT_SHADER_ATOMIC_FLOAT_EXTENSION_NAME "VK_EXT_shader_atomic_float"
+typedef struct VkPhysicalDeviceShaderAtomicFloatFeaturesEXT {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 shaderBufferFloat32Atomics;
+ VkBool32 shaderBufferFloat32AtomicAdd;
+ VkBool32 shaderBufferFloat64Atomics;
+ VkBool32 shaderBufferFloat64AtomicAdd;
+ VkBool32 shaderSharedFloat32Atomics;
+ VkBool32 shaderSharedFloat32AtomicAdd;
+ VkBool32 shaderSharedFloat64Atomics;
+ VkBool32 shaderSharedFloat64AtomicAdd;
+ VkBool32 shaderImageFloat32Atomics;
+ VkBool32 shaderImageFloat32AtomicAdd;
+ VkBool32 sparseImageFloat32Atomics;
+ VkBool32 sparseImageFloat32AtomicAdd;
+} VkPhysicalDeviceShaderAtomicFloatFeaturesEXT;
+
+
+
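/* Illustrative sketch, not part of the diffed header: querying the
 * VK_EXT_shader_atomic_float features declared above by chaining the struct
 * into vkGetPhysicalDeviceFeatures2. The `physicalDevice` handle is an
 * assumption for the example. */
VkPhysicalDeviceShaderAtomicFloatFeaturesEXT atomicFloat = {
    .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT
};
VkPhysicalDeviceFeatures2 features2 = {
    .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
    .pNext = &atomicFloat
};
vkGetPhysicalDeviceFeatures2(physicalDevice, &features2);
/* atomicFloat.shaderBufferFloat32AtomicAdd etc. now report support. */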
#define VK_EXT_host_query_reset 1
#define VK_EXT_HOST_QUERY_RESET_SPEC_VERSION 1
#define VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME "VK_EXT_host_query_reset"
@@ -10680,6 +10624,90 @@ typedef struct VkPhysicalDeviceIndexTypeUint8FeaturesEXT {
+#define VK_EXT_extended_dynamic_state 1
+#define VK_EXT_EXTENDED_DYNAMIC_STATE_SPEC_VERSION 1
+#define VK_EXT_EXTENDED_DYNAMIC_STATE_EXTENSION_NAME "VK_EXT_extended_dynamic_state"
+typedef struct VkPhysicalDeviceExtendedDynamicStateFeaturesEXT {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 extendedDynamicState;
+} VkPhysicalDeviceExtendedDynamicStateFeaturesEXT;
+
+typedef void (VKAPI_PTR *PFN_vkCmdSetCullModeEXT)(VkCommandBuffer commandBuffer, VkCullModeFlags cullMode);
+typedef void (VKAPI_PTR *PFN_vkCmdSetFrontFaceEXT)(VkCommandBuffer commandBuffer, VkFrontFace frontFace);
+typedef void (VKAPI_PTR *PFN_vkCmdSetPrimitiveTopologyEXT)(VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology);
+typedef void (VKAPI_PTR *PFN_vkCmdSetViewportWithCountEXT)(VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport* pViewports);
+typedef void (VKAPI_PTR *PFN_vkCmdSetScissorWithCountEXT)(VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D* pScissors);
+typedef void (VKAPI_PTR *PFN_vkCmdBindVertexBuffers2EXT)(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes, const VkDeviceSize* pStrides);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDepthTestEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthTestEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDepthWriteEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDepthCompareOpEXT)(VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBoundsTestEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetStencilTestEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetStencilOpEXT)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp, VkCompareOp compareOp);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdSetCullModeEXT(
+ VkCommandBuffer commandBuffer,
+ VkCullModeFlags cullMode);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetFrontFaceEXT(
+ VkCommandBuffer commandBuffer,
+ VkFrontFace frontFace);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetPrimitiveTopologyEXT(
+ VkCommandBuffer commandBuffer,
+ VkPrimitiveTopology primitiveTopology);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportWithCountEXT(
+ VkCommandBuffer commandBuffer,
+ uint32_t viewportCount,
+ const VkViewport* pViewports);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetScissorWithCountEXT(
+ VkCommandBuffer commandBuffer,
+ uint32_t scissorCount,
+ const VkRect2D* pScissors);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBindVertexBuffers2EXT(
+ VkCommandBuffer commandBuffer,
+ uint32_t firstBinding,
+ uint32_t bindingCount,
+ const VkBuffer* pBuffers,
+ const VkDeviceSize* pOffsets,
+ const VkDeviceSize* pSizes,
+ const VkDeviceSize* pStrides);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthTestEnableEXT(
+ VkCommandBuffer commandBuffer,
+ VkBool32 depthTestEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthWriteEnableEXT(
+ VkCommandBuffer commandBuffer,
+ VkBool32 depthWriteEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthCompareOpEXT(
+ VkCommandBuffer commandBuffer,
+ VkCompareOp depthCompareOp);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBoundsTestEnableEXT(
+ VkCommandBuffer commandBuffer,
+ VkBool32 depthBoundsTestEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilTestEnableEXT(
+ VkCommandBuffer commandBuffer,
+ VkBool32 stencilTestEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilOpEXT(
+ VkCommandBuffer commandBuffer,
+ VkStencilFaceFlags faceMask,
+ VkStencilOp failOp,
+ VkStencilOp passOp,
+ VkStencilOp depthFailOp,
+ VkCompareOp compareOp);
+#endif
+
+
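/* Illustrative sketch, not part of the diffed header: recording the dynamic
 * state commands added by VK_EXT_extended_dynamic_state. Assumes `cmd` is a
 * command buffer in the recording state and the bound pipeline declared these
 * states as dynamic. */
vkCmdSetCullModeEXT(cmd, VK_CULL_MODE_BACK_BIT);
vkCmdSetFrontFaceEXT(cmd, VK_FRONT_FACE_COUNTER_CLOCKWISE);
vkCmdSetPrimitiveTopologyEXT(cmd, VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST);
vkCmdSetDepthTestEnableEXT(cmd, VK_TRUE);
vkCmdSetDepthWriteEnableEXT(cmd, VK_TRUE);
vkCmdSetDepthCompareOpEXT(cmd, VK_COMPARE_OP_LESS_OR_EQUAL);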
#define VK_EXT_shader_demote_to_helper_invocation 1
#define VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_SPEC_VERSION 1
#define VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_EXTENSION_NAME "VK_EXT_shader_demote_to_helper_invocation"
@@ -10691,6 +10719,196 @@ typedef struct VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT {
+#define VK_NV_device_generated_commands 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkIndirectCommandsLayoutNV)
+#define VK_NV_DEVICE_GENERATED_COMMANDS_SPEC_VERSION 3
+#define VK_NV_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME "VK_NV_device_generated_commands"
+
+typedef enum VkIndirectCommandsTokenTypeNV {
+ VK_INDIRECT_COMMANDS_TOKEN_TYPE_SHADER_GROUP_NV = 0,
+ VK_INDIRECT_COMMANDS_TOKEN_TYPE_STATE_FLAGS_NV = 1,
+ VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NV = 2,
+ VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NV = 3,
+ VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NV = 4,
+ VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NV = 5,
+ VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV = 6,
+ VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV = 7,
+ VK_INDIRECT_COMMANDS_TOKEN_TYPE_MAX_ENUM_NV = 0x7FFFFFFF
+} VkIndirectCommandsTokenTypeNV;
+
+typedef enum VkIndirectStateFlagBitsNV {
+ VK_INDIRECT_STATE_FLAG_FRONTFACE_BIT_NV = 0x00000001,
+ VK_INDIRECT_STATE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF
+} VkIndirectStateFlagBitsNV;
+typedef VkFlags VkIndirectStateFlagsNV;
+
+typedef enum VkIndirectCommandsLayoutUsageFlagBitsNV {
+ VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EXPLICIT_PREPROCESS_BIT_NV = 0x00000001,
+ VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NV = 0x00000002,
+ VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NV = 0x00000004,
+ VK_INDIRECT_COMMANDS_LAYOUT_USAGE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF
+} VkIndirectCommandsLayoutUsageFlagBitsNV;
+typedef VkFlags VkIndirectCommandsLayoutUsageFlagsNV;
+typedef struct VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV {
+ VkStructureType sType;
+ void* pNext;
+ uint32_t maxGraphicsShaderGroupCount;
+ uint32_t maxIndirectSequenceCount;
+ uint32_t maxIndirectCommandsTokenCount;
+ uint32_t maxIndirectCommandsStreamCount;
+ uint32_t maxIndirectCommandsTokenOffset;
+ uint32_t maxIndirectCommandsStreamStride;
+ uint32_t minSequencesCountBufferOffsetAlignment;
+ uint32_t minSequencesIndexBufferOffsetAlignment;
+ uint32_t minIndirectCommandsBufferOffsetAlignment;
+} VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV;
+
+typedef struct VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 deviceGeneratedCommands;
+} VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV;
+
+typedef struct VkGraphicsShaderGroupCreateInfoNV {
+ VkStructureType sType;
+ const void* pNext;
+ uint32_t stageCount;
+ const VkPipelineShaderStageCreateInfo* pStages;
+ const VkPipelineVertexInputStateCreateInfo* pVertexInputState;
+ const VkPipelineTessellationStateCreateInfo* pTessellationState;
+} VkGraphicsShaderGroupCreateInfoNV;
+
+typedef struct VkGraphicsPipelineShaderGroupsCreateInfoNV {
+ VkStructureType sType;
+ const void* pNext;
+ uint32_t groupCount;
+ const VkGraphicsShaderGroupCreateInfoNV* pGroups;
+ uint32_t pipelineCount;
+ const VkPipeline* pPipelines;
+} VkGraphicsPipelineShaderGroupsCreateInfoNV;
+
+typedef struct VkBindShaderGroupIndirectCommandNV {
+ uint32_t groupIndex;
+} VkBindShaderGroupIndirectCommandNV;
+
+typedef struct VkBindIndexBufferIndirectCommandNV {
+ VkDeviceAddress bufferAddress;
+ uint32_t size;
+ VkIndexType indexType;
+} VkBindIndexBufferIndirectCommandNV;
+
+typedef struct VkBindVertexBufferIndirectCommandNV {
+ VkDeviceAddress bufferAddress;
+ uint32_t size;
+ uint32_t stride;
+} VkBindVertexBufferIndirectCommandNV;
+
+typedef struct VkSetStateFlagsIndirectCommandNV {
+ uint32_t data;
+} VkSetStateFlagsIndirectCommandNV;
+
+typedef struct VkIndirectCommandsStreamNV {
+ VkBuffer buffer;
+ VkDeviceSize offset;
+} VkIndirectCommandsStreamNV;
+
+typedef struct VkIndirectCommandsLayoutTokenNV {
+ VkStructureType sType;
+ const void* pNext;
+ VkIndirectCommandsTokenTypeNV tokenType;
+ uint32_t stream;
+ uint32_t offset;
+ uint32_t vertexBindingUnit;
+ VkBool32 vertexDynamicStride;
+ VkPipelineLayout pushconstantPipelineLayout;
+ VkShaderStageFlags pushconstantShaderStageFlags;
+ uint32_t pushconstantOffset;
+ uint32_t pushconstantSize;
+ VkIndirectStateFlagsNV indirectStateFlags;
+ uint32_t indexTypeCount;
+ const VkIndexType* pIndexTypes;
+ const uint32_t* pIndexTypeValues;
+} VkIndirectCommandsLayoutTokenNV;
+
+typedef struct VkIndirectCommandsLayoutCreateInfoNV {
+ VkStructureType sType;
+ const void* pNext;
+ VkIndirectCommandsLayoutUsageFlagsNV flags;
+ VkPipelineBindPoint pipelineBindPoint;
+ uint32_t tokenCount;
+ const VkIndirectCommandsLayoutTokenNV* pTokens;
+ uint32_t streamCount;
+ const uint32_t* pStreamStrides;
+} VkIndirectCommandsLayoutCreateInfoNV;
+
+typedef struct VkGeneratedCommandsInfoNV {
+ VkStructureType sType;
+ const void* pNext;
+ VkPipelineBindPoint pipelineBindPoint;
+ VkPipeline pipeline;
+ VkIndirectCommandsLayoutNV indirectCommandsLayout;
+ uint32_t streamCount;
+ const VkIndirectCommandsStreamNV* pStreams;
+ uint32_t sequencesCount;
+ VkBuffer preprocessBuffer;
+ VkDeviceSize preprocessOffset;
+ VkDeviceSize preprocessSize;
+ VkBuffer sequencesCountBuffer;
+ VkDeviceSize sequencesCountOffset;
+ VkBuffer sequencesIndexBuffer;
+ VkDeviceSize sequencesIndexOffset;
+} VkGeneratedCommandsInfoNV;
+
+typedef struct VkGeneratedCommandsMemoryRequirementsInfoNV {
+ VkStructureType sType;
+ const void* pNext;
+ VkPipelineBindPoint pipelineBindPoint;
+ VkPipeline pipeline;
+ VkIndirectCommandsLayoutNV indirectCommandsLayout;
+ uint32_t maxSequencesCount;
+} VkGeneratedCommandsMemoryRequirementsInfoNV;
+
+typedef void (VKAPI_PTR *PFN_vkGetGeneratedCommandsMemoryRequirementsNV)(VkDevice device, const VkGeneratedCommandsMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2* pMemoryRequirements);
+typedef void (VKAPI_PTR *PFN_vkCmdPreprocessGeneratedCommandsNV)(VkCommandBuffer commandBuffer, const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdExecuteGeneratedCommandsNV)(VkCommandBuffer commandBuffer, VkBool32 isPreprocessed, const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdBindPipelineShaderGroupNV)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline, uint32_t groupIndex);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateIndirectCommandsLayoutNV)(VkDevice device, const VkIndirectCommandsLayoutCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNV* pIndirectCommandsLayout);
+typedef void (VKAPI_PTR *PFN_vkDestroyIndirectCommandsLayoutNV)(VkDevice device, VkIndirectCommandsLayoutNV indirectCommandsLayout, const VkAllocationCallbacks* pAllocator);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkGetGeneratedCommandsMemoryRequirementsNV(
+ VkDevice device,
+ const VkGeneratedCommandsMemoryRequirementsInfoNV* pInfo,
+ VkMemoryRequirements2* pMemoryRequirements);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdPreprocessGeneratedCommandsNV(
+ VkCommandBuffer commandBuffer,
+ const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdExecuteGeneratedCommandsNV(
+ VkCommandBuffer commandBuffer,
+ VkBool32 isPreprocessed,
+ const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBindPipelineShaderGroupNV(
+ VkCommandBuffer commandBuffer,
+ VkPipelineBindPoint pipelineBindPoint,
+ VkPipeline pipeline,
+ uint32_t groupIndex);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateIndirectCommandsLayoutNV(
+ VkDevice device,
+ const VkIndirectCommandsLayoutCreateInfoNV* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator,
+ VkIndirectCommandsLayoutNV* pIndirectCommandsLayout);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyIndirectCommandsLayoutNV(
+ VkDevice device,
+ VkIndirectCommandsLayoutNV indirectCommandsLayout,
+ const VkAllocationCallbacks* pAllocator);
+#endif
+
+
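/* Illustrative sketch, not part of the diffed header: querying the preprocess
 * buffer requirements for VK_NV_device_generated_commands. The `pipeline`,
 * `indirectLayout` and `device` handles and the sequence count are assumptions
 * for the example. */
VkGeneratedCommandsMemoryRequirementsInfoNV reqInfo = {
    .sType                  = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_NV,
    .pNext                  = NULL,
    .pipelineBindPoint      = VK_PIPELINE_BIND_POINT_GRAPHICS,
    .pipeline               = pipeline,
    .indirectCommandsLayout = indirectLayout,
    .maxSequencesCount      = 1024
};
VkMemoryRequirements2 memReq = {
    .sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2
};
vkGetGeneratedCommandsMemoryRequirementsNV(device, &reqInfo, &memReq);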
#define VK_EXT_texel_buffer_alignment 1
#define VK_EXT_TEXEL_BUFFER_ALIGNMENT_SPEC_VERSION 1
#define VK_EXT_TEXEL_BUFFER_ALIGNMENT_EXTENSION_NAME "VK_EXT_texel_buffer_alignment"
@@ -10711,10 +10929,217 @@ typedef struct VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT {
+#define VK_QCOM_render_pass_transform 1
+#define VK_QCOM_RENDER_PASS_TRANSFORM_SPEC_VERSION 1
+#define VK_QCOM_RENDER_PASS_TRANSFORM_EXTENSION_NAME "VK_QCOM_render_pass_transform"
+typedef struct VkRenderPassTransformBeginInfoQCOM {
+ VkStructureType sType;
+ void* pNext;
+ VkSurfaceTransformFlagBitsKHR transform;
+} VkRenderPassTransformBeginInfoQCOM;
+
+typedef struct VkCommandBufferInheritanceRenderPassTransformInfoQCOM {
+ VkStructureType sType;
+ void* pNext;
+ VkSurfaceTransformFlagBitsKHR transform;
+ VkRect2D renderArea;
+} VkCommandBufferInheritanceRenderPassTransformInfoQCOM;
+
+
+
+#define VK_EXT_robustness2 1
+#define VK_EXT_ROBUSTNESS_2_SPEC_VERSION 1
+#define VK_EXT_ROBUSTNESS_2_EXTENSION_NAME "VK_EXT_robustness2"
+typedef struct VkPhysicalDeviceRobustness2FeaturesEXT {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 robustBufferAccess2;
+ VkBool32 robustImageAccess2;
+ VkBool32 nullDescriptor;
+} VkPhysicalDeviceRobustness2FeaturesEXT;
+
+typedef struct VkPhysicalDeviceRobustness2PropertiesEXT {
+ VkStructureType sType;
+ void* pNext;
+ VkDeviceSize robustStorageBufferAccessSizeAlignment;
+ VkDeviceSize robustUniformBufferAccessSizeAlignment;
+} VkPhysicalDeviceRobustness2PropertiesEXT;
+
+
+
+#define VK_EXT_custom_border_color 1
+#define VK_EXT_CUSTOM_BORDER_COLOR_SPEC_VERSION 12
+#define VK_EXT_CUSTOM_BORDER_COLOR_EXTENSION_NAME "VK_EXT_custom_border_color"
+typedef struct VkSamplerCustomBorderColorCreateInfoEXT {
+ VkStructureType sType;
+ const void* pNext;
+ VkClearColorValue customBorderColor;
+ VkFormat format;
+} VkSamplerCustomBorderColorCreateInfoEXT;
+
+typedef struct VkPhysicalDeviceCustomBorderColorPropertiesEXT {
+ VkStructureType sType;
+ void* pNext;
+ uint32_t maxCustomBorderColorSamplers;
+} VkPhysicalDeviceCustomBorderColorPropertiesEXT;
+
+typedef struct VkPhysicalDeviceCustomBorderColorFeaturesEXT {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 customBorderColors;
+ VkBool32 customBorderColorWithoutFormat;
+} VkPhysicalDeviceCustomBorderColorFeaturesEXT;
+
+
+
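/* Illustrative sketch, not part of the diffed header: chaining
 * VkSamplerCustomBorderColorCreateInfoEXT into a sampler create info. Feature
 * enablement and the chosen color/format are assumptions for the example;
 * unlisted VkSamplerCreateInfo fields are left zero-initialized. */
VkSamplerCustomBorderColorCreateInfoEXT customBorder = {
    .sType             = VK_STRUCTURE_TYPE_SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT,
    .pNext             = NULL,
    .customBorderColor = { .float32 = { 1.0f, 0.0f, 0.0f, 1.0f } },
    .format            = VK_FORMAT_R8G8B8A8_UNORM
};
VkSamplerCreateInfo samplerInfo = {
    .sType        = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
    .pNext        = &customBorder,
    .addressModeU = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,
    .addressModeV = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,
    .addressModeW = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,
    .borderColor  = VK_BORDER_COLOR_FLOAT_CUSTOM_EXT
};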
#define VK_GOOGLE_user_type 1
#define VK_GOOGLE_USER_TYPE_SPEC_VERSION 1
#define VK_GOOGLE_USER_TYPE_EXTENSION_NAME "VK_GOOGLE_user_type"
+
+#define VK_EXT_private_data 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPrivateDataSlotEXT)
+#define VK_EXT_PRIVATE_DATA_SPEC_VERSION 1
+#define VK_EXT_PRIVATE_DATA_EXTENSION_NAME "VK_EXT_private_data"
+
+typedef enum VkPrivateDataSlotCreateFlagBitsEXT {
+ VK_PRIVATE_DATA_SLOT_CREATE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkPrivateDataSlotCreateFlagBitsEXT;
+typedef VkFlags VkPrivateDataSlotCreateFlagsEXT;
+typedef struct VkPhysicalDevicePrivateDataFeaturesEXT {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 privateData;
+} VkPhysicalDevicePrivateDataFeaturesEXT;
+
+typedef struct VkDevicePrivateDataCreateInfoEXT {
+ VkStructureType sType;
+ const void* pNext;
+ uint32_t privateDataSlotRequestCount;
+} VkDevicePrivateDataCreateInfoEXT;
+
+typedef struct VkPrivateDataSlotCreateInfoEXT {
+ VkStructureType sType;
+ const void* pNext;
+ VkPrivateDataSlotCreateFlagsEXT flags;
+} VkPrivateDataSlotCreateInfoEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreatePrivateDataSlotEXT)(VkDevice device, const VkPrivateDataSlotCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPrivateDataSlotEXT* pPrivateDataSlot);
+typedef void (VKAPI_PTR *PFN_vkDestroyPrivateDataSlotEXT)(VkDevice device, VkPrivateDataSlotEXT privateDataSlot, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkSetPrivateDataEXT)(VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlotEXT privateDataSlot, uint64_t data);
+typedef void (VKAPI_PTR *PFN_vkGetPrivateDataEXT)(VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlotEXT privateDataSlot, uint64_t* pData);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreatePrivateDataSlotEXT(
+ VkDevice device,
+ const VkPrivateDataSlotCreateInfoEXT* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator,
+ VkPrivateDataSlotEXT* pPrivateDataSlot);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyPrivateDataSlotEXT(
+ VkDevice device,
+ VkPrivateDataSlotEXT privateDataSlot,
+ const VkAllocationCallbacks* pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkSetPrivateDataEXT(
+ VkDevice device,
+ VkObjectType objectType,
+ uint64_t objectHandle,
+ VkPrivateDataSlotEXT privateDataSlot,
+ uint64_t data);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPrivateDataEXT(
+ VkDevice device,
+ VkObjectType objectType,
+ uint64_t objectHandle,
+ VkPrivateDataSlotEXT privateDataSlot,
+ uint64_t* pData);
+#endif
+
+
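/* Illustrative sketch, not part of the diffed header: attaching a 64-bit value
 * to an object with VK_EXT_private_data. The `device` and `image` handles are
 * assumptions for the example, and error handling is omitted. */
VkPrivateDataSlotCreateInfoEXT slotInfo = {
    .sType = VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO_EXT,
    .pNext = NULL,
    .flags = 0
};
VkPrivateDataSlotEXT slot;
vkCreatePrivateDataSlotEXT(device, &slotInfo, NULL, &slot);
vkSetPrivateDataEXT(device, VK_OBJECT_TYPE_IMAGE, (uint64_t)image, slot, 42u);

uint64_t value;
vkGetPrivateDataEXT(device, VK_OBJECT_TYPE_IMAGE, (uint64_t)image, slot, &value);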
+#define VK_EXT_pipeline_creation_cache_control 1
+#define VK_EXT_PIPELINE_CREATION_CACHE_CONTROL_SPEC_VERSION 3
+#define VK_EXT_PIPELINE_CREATION_CACHE_CONTROL_EXTENSION_NAME "VK_EXT_pipeline_creation_cache_control"
+typedef struct VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 pipelineCreationCacheControl;
+} VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT;
+
+
+
+#define VK_NV_device_diagnostics_config 1
+#define VK_NV_DEVICE_DIAGNOSTICS_CONFIG_SPEC_VERSION 1
+#define VK_NV_DEVICE_DIAGNOSTICS_CONFIG_EXTENSION_NAME "VK_NV_device_diagnostics_config"
+
+typedef enum VkDeviceDiagnosticsConfigFlagBitsNV {
+ VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_DEBUG_INFO_BIT_NV = 0x00000001,
+ VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_RESOURCE_TRACKING_BIT_NV = 0x00000002,
+ VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_AUTOMATIC_CHECKPOINTS_BIT_NV = 0x00000004,
+ VK_DEVICE_DIAGNOSTICS_CONFIG_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF
+} VkDeviceDiagnosticsConfigFlagBitsNV;
+typedef VkFlags VkDeviceDiagnosticsConfigFlagsNV;
+typedef struct VkPhysicalDeviceDiagnosticsConfigFeaturesNV {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 diagnosticsConfig;
+} VkPhysicalDeviceDiagnosticsConfigFeaturesNV;
+
+typedef struct VkDeviceDiagnosticsConfigCreateInfoNV {
+ VkStructureType sType;
+ const void* pNext;
+ VkDeviceDiagnosticsConfigFlagsNV flags;
+} VkDeviceDiagnosticsConfigCreateInfoNV;
+
+
+
+#define VK_QCOM_render_pass_store_ops 1
+#define VK_QCOM_render_pass_store_ops_SPEC_VERSION 2
+#define VK_QCOM_render_pass_store_ops_EXTENSION_NAME "VK_QCOM_render_pass_store_ops"
+
+
+#define VK_EXT_fragment_density_map2 1
+#define VK_EXT_FRAGMENT_DENSITY_MAP_2_SPEC_VERSION 1
+#define VK_EXT_FRAGMENT_DENSITY_MAP_2_EXTENSION_NAME "VK_EXT_fragment_density_map2"
+typedef struct VkPhysicalDeviceFragmentDensityMap2FeaturesEXT {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 fragmentDensityMapDeferred;
+} VkPhysicalDeviceFragmentDensityMap2FeaturesEXT;
+
+typedef struct VkPhysicalDeviceFragmentDensityMap2PropertiesEXT {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 subsampledLoads;
+ VkBool32 subsampledCoarseReconstructionEarlyAccess;
+ uint32_t maxSubsampledArrayLayers;
+ uint32_t maxDescriptorSetSubsampledSamplers;
+} VkPhysicalDeviceFragmentDensityMap2PropertiesEXT;
+
+
+
+#define VK_EXT_image_robustness 1
+#define VK_EXT_IMAGE_ROBUSTNESS_SPEC_VERSION 1
+#define VK_EXT_IMAGE_ROBUSTNESS_EXTENSION_NAME "VK_EXT_image_robustness"
+typedef struct VkPhysicalDeviceImageRobustnessFeaturesEXT {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 robustImageAccess;
+} VkPhysicalDeviceImageRobustnessFeaturesEXT;
+
+
+
+#define VK_EXT_4444_formats 1
+#define VK_EXT_4444_FORMATS_SPEC_VERSION 1
+#define VK_EXT_4444_FORMATS_EXTENSION_NAME "VK_EXT_4444_formats"
+typedef struct VkPhysicalDevice4444FormatsFeaturesEXT {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 formatA4R4G4B4;
+ VkBool32 formatA4B4G4R4;
+} VkPhysicalDevice4444FormatsFeaturesEXT;
+
+
#ifdef __cplusplus
}
#endif
diff --git a/thirdparty/vulkan/include/vulkan/vulkan_directfb.h b/thirdparty/vulkan/include/vulkan/vulkan_directfb.h
new file mode 100644
index 0000000000..f75bd3a4e1
--- /dev/null
+++ b/thirdparty/vulkan/include/vulkan/vulkan_directfb.h
@@ -0,0 +1,54 @@
+#ifndef VULKAN_DIRECTFB_H_
+#define VULKAN_DIRECTFB_H_ 1
+
+/*
+** Copyright (c) 2015-2020 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#define VK_EXT_directfb_surface 1
+#define VK_EXT_DIRECTFB_SURFACE_SPEC_VERSION 1
+#define VK_EXT_DIRECTFB_SURFACE_EXTENSION_NAME "VK_EXT_directfb_surface"
+typedef VkFlags VkDirectFBSurfaceCreateFlagsEXT;
+typedef struct VkDirectFBSurfaceCreateInfoEXT {
+ VkStructureType sType;
+ const void* pNext;
+ VkDirectFBSurfaceCreateFlagsEXT flags;
+ IDirectFB* dfb;
+ IDirectFBSurface* surface;
+} VkDirectFBSurfaceCreateInfoEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateDirectFBSurfaceEXT)(VkInstance instance, const VkDirectFBSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, IDirectFB* dfb);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateDirectFBSurfaceEXT(
+ VkInstance instance,
+ const VkDirectFBSurfaceCreateInfoEXT* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator,
+ VkSurfaceKHR* pSurface);
+
+VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceDirectFBPresentationSupportEXT(
+ VkPhysicalDevice physicalDevice,
+ uint32_t queueFamilyIndex,
+ IDirectFB* dfb);
+#endif
+
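/* Illustrative sketch, not part of the diffed header: creating a DirectFB
 * surface with the new VK_EXT_directfb_surface entry point. The `instance`,
 * `dfb` and `dfbSurface` handles are assumed to come from prior Vulkan and
 * DirectFB setup (e.g. DirectFBCreate / CreateSurface). */
VkDirectFBSurfaceCreateInfoEXT surfInfo = {
    .sType   = VK_STRUCTURE_TYPE_DIRECTFB_SURFACE_CREATE_INFO_EXT,
    .pNext   = NULL,
    .flags   = 0,
    .dfb     = dfb,
    .surface = dfbSurface
};
VkSurfaceKHR surface;
vkCreateDirectFBSurfaceEXT(instance, &surfInfo, NULL, &surface);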
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/thirdparty/vulkan/include/vulkan/vulkan_fuchsia.h b/thirdparty/vulkan/include/vulkan/vulkan_fuchsia.h
index 81ebe55d31..03e27cb0af 100644
--- a/thirdparty/vulkan/include/vulkan/vulkan_fuchsia.h
+++ b/thirdparty/vulkan/include/vulkan/vulkan_fuchsia.h
@@ -2,19 +2,9 @@
#define VULKAN_FUCHSIA_H_ 1
/*
-** Copyright (c) 2015-2019 The Khronos Group Inc.
+** Copyright (c) 2015-2020 The Khronos Group Inc.
**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-** http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
+** SPDX-License-Identifier: Apache-2.0
*/
/*
diff --git a/thirdparty/vulkan/include/vulkan/vulkan_ggp.h b/thirdparty/vulkan/include/vulkan/vulkan_ggp.h
index fd306131c3..273c88005b 100644
--- a/thirdparty/vulkan/include/vulkan/vulkan_ggp.h
+++ b/thirdparty/vulkan/include/vulkan/vulkan_ggp.h
@@ -2,19 +2,9 @@
#define VULKAN_GGP_H_ 1
/*
-** Copyright (c) 2015-2019 The Khronos Group Inc.
+** Copyright (c) 2015-2020 The Khronos Group Inc.
**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-** http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
+** SPDX-License-Identifier: Apache-2.0
*/
/*
diff --git a/thirdparty/vulkan/include/vulkan/vulkan_ios.h b/thirdparty/vulkan/include/vulkan/vulkan_ios.h
index 72ef1a8a82..446a2698d9 100644
--- a/thirdparty/vulkan/include/vulkan/vulkan_ios.h
+++ b/thirdparty/vulkan/include/vulkan/vulkan_ios.h
@@ -2,19 +2,9 @@
#define VULKAN_IOS_H_ 1
/*
-** Copyright (c) 2015-2019 The Khronos Group Inc.
+** Copyright (c) 2015-2020 The Khronos Group Inc.
**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-** http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
+** SPDX-License-Identifier: Apache-2.0
*/
/*
@@ -30,7 +20,7 @@ extern "C" {
#define VK_MVK_ios_surface 1
-#define VK_MVK_IOS_SURFACE_SPEC_VERSION 2
+#define VK_MVK_IOS_SURFACE_SPEC_VERSION 3
#define VK_MVK_IOS_SURFACE_EXTENSION_NAME "VK_MVK_ios_surface"
typedef VkFlags VkIOSSurfaceCreateFlagsMVK;
typedef struct VkIOSSurfaceCreateInfoMVK {
diff --git a/thirdparty/vulkan/include/vulkan/vulkan_macos.h b/thirdparty/vulkan/include/vulkan/vulkan_macos.h
index e6e5deaa36..35fcabe39b 100644
--- a/thirdparty/vulkan/include/vulkan/vulkan_macos.h
+++ b/thirdparty/vulkan/include/vulkan/vulkan_macos.h
@@ -2,19 +2,9 @@
#define VULKAN_MACOS_H_ 1
/*
-** Copyright (c) 2015-2019 The Khronos Group Inc.
+** Copyright (c) 2015-2020 The Khronos Group Inc.
**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-** http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
+** SPDX-License-Identifier: Apache-2.0
*/
/*
@@ -30,7 +20,7 @@ extern "C" {
#define VK_MVK_macos_surface 1
-#define VK_MVK_MACOS_SURFACE_SPEC_VERSION 2
+#define VK_MVK_MACOS_SURFACE_SPEC_VERSION 3
#define VK_MVK_MACOS_SURFACE_EXTENSION_NAME "VK_MVK_macos_surface"
typedef VkFlags VkMacOSSurfaceCreateFlagsMVK;
typedef struct VkMacOSSurfaceCreateInfoMVK {
diff --git a/thirdparty/vulkan/include/vulkan/vulkan_metal.h b/thirdparty/vulkan/include/vulkan/vulkan_metal.h
index 3dec68c771..99f097d954 100644
--- a/thirdparty/vulkan/include/vulkan/vulkan_metal.h
+++ b/thirdparty/vulkan/include/vulkan/vulkan_metal.h
@@ -2,19 +2,9 @@
#define VULKAN_METAL_H_ 1
/*
-** Copyright (c) 2015-2019 The Khronos Group Inc.
+** Copyright (c) 2015-2020 The Khronos Group Inc.
**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-** http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
+** SPDX-License-Identifier: Apache-2.0
*/
/*
diff --git a/thirdparty/vulkan/include/vulkan/vulkan_vi.h b/thirdparty/vulkan/include/vulkan/vulkan_vi.h
index 6fb66f9dd2..2e62d7d3a7 100644
--- a/thirdparty/vulkan/include/vulkan/vulkan_vi.h
+++ b/thirdparty/vulkan/include/vulkan/vulkan_vi.h
@@ -2,19 +2,9 @@
#define VULKAN_VI_H_ 1
/*
-** Copyright (c) 2015-2019 The Khronos Group Inc.
+** Copyright (c) 2015-2020 The Khronos Group Inc.
**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-** http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
+** SPDX-License-Identifier: Apache-2.0
*/
/*
diff --git a/thirdparty/vulkan/include/vulkan/vulkan_wayland.h b/thirdparty/vulkan/include/vulkan/vulkan_wayland.h
index 599d05b24a..f7b307e519 100644
--- a/thirdparty/vulkan/include/vulkan/vulkan_wayland.h
+++ b/thirdparty/vulkan/include/vulkan/vulkan_wayland.h
@@ -2,19 +2,9 @@
#define VULKAN_WAYLAND_H_ 1
/*
-** Copyright (c) 2015-2019 The Khronos Group Inc.
+** Copyright (c) 2015-2020 The Khronos Group Inc.
**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-** http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
+** SPDX-License-Identifier: Apache-2.0
*/
/*
diff --git a/thirdparty/vulkan/include/vulkan/vulkan_win32.h b/thirdparty/vulkan/include/vulkan/vulkan_win32.h
index 20a1dc0e58..4b561ea10a 100644
--- a/thirdparty/vulkan/include/vulkan/vulkan_win32.h
+++ b/thirdparty/vulkan/include/vulkan/vulkan_win32.h
@@ -2,19 +2,9 @@
#define VULKAN_WIN32_H_ 1
/*
-** Copyright (c) 2015-2019 The Khronos Group Inc.
+** Copyright (c) 2015-2020 The Khronos Group Inc.
**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-** http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
+** SPDX-License-Identifier: Apache-2.0
*/
/*
@@ -272,9 +262,6 @@ typedef enum VkFullScreenExclusiveEXT {
VK_FULL_SCREEN_EXCLUSIVE_ALLOWED_EXT = 1,
VK_FULL_SCREEN_EXCLUSIVE_DISALLOWED_EXT = 2,
VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT = 3,
- VK_FULL_SCREEN_EXCLUSIVE_BEGIN_RANGE_EXT = VK_FULL_SCREEN_EXCLUSIVE_DEFAULT_EXT,
- VK_FULL_SCREEN_EXCLUSIVE_END_RANGE_EXT = VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT,
- VK_FULL_SCREEN_EXCLUSIVE_RANGE_SIZE_EXT = (VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT - VK_FULL_SCREEN_EXCLUSIVE_DEFAULT_EXT + 1),
VK_FULL_SCREEN_EXCLUSIVE_MAX_ENUM_EXT = 0x7FFFFFFF
} VkFullScreenExclusiveEXT;
typedef struct VkSurfaceFullScreenExclusiveInfoEXT {
diff --git a/thirdparty/vulkan/include/vulkan/vulkan_xcb.h b/thirdparty/vulkan/include/vulkan/vulkan_xcb.h
index 4cc0bc0cec..c5441b239d 100644
--- a/thirdparty/vulkan/include/vulkan/vulkan_xcb.h
+++ b/thirdparty/vulkan/include/vulkan/vulkan_xcb.h
@@ -2,19 +2,9 @@
#define VULKAN_XCB_H_ 1
/*
-** Copyright (c) 2015-2019 The Khronos Group Inc.
+** Copyright (c) 2015-2020 The Khronos Group Inc.
**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-** http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
+** SPDX-License-Identifier: Apache-2.0
*/
/*
diff --git a/thirdparty/vulkan/include/vulkan/vulkan_xlib.h b/thirdparty/vulkan/include/vulkan/vulkan_xlib.h
index ee2b48accb..c54628a7ec 100644
--- a/thirdparty/vulkan/include/vulkan/vulkan_xlib.h
+++ b/thirdparty/vulkan/include/vulkan/vulkan_xlib.h
@@ -2,19 +2,9 @@
#define VULKAN_XLIB_H_ 1
/*
-** Copyright (c) 2015-2019 The Khronos Group Inc.
+** Copyright (c) 2015-2020 The Khronos Group Inc.
**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-** http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
+** SPDX-License-Identifier: Apache-2.0
*/
/*
diff --git a/thirdparty/vulkan/include/vulkan/vulkan_xlib_xrandr.h b/thirdparty/vulkan/include/vulkan/vulkan_xlib_xrandr.h
index 08c4fd729c..436432f842 100644
--- a/thirdparty/vulkan/include/vulkan/vulkan_xlib_xrandr.h
+++ b/thirdparty/vulkan/include/vulkan/vulkan_xlib_xrandr.h
@@ -2,19 +2,9 @@
#define VULKAN_XLIB_XRANDR_H_ 1
/*
-** Copyright (c) 2015-2019 The Khronos Group Inc.
+** Copyright (c) 2015-2020 The Khronos Group Inc.
**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-** http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
+** SPDX-License-Identifier: Apache-2.0
*/
/*